From 68be76766e4a23c1eaf8f9d625d515fb577603b8 Mon Sep 17 00:00:00 2001 From: noobyu6 <1044510784@qq.com> Date: Mon, 20 Jun 2022 17:25:10 +0800 Subject: [PATCH 0001/1525] class field can be replaced by local variable --- .../api/datadeposit/SWORDv2ContainerServlet.java | 13 ++++++++----- 1 file changed, 8 insertions(+), 5 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/SWORDv2ContainerServlet.java b/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/SWORDv2ContainerServlet.java index d8ba8eec4ca..5e3e6937721 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/SWORDv2ContainerServlet.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/SWORDv2ContainerServlet.java @@ -17,9 +17,9 @@ public class SWORDv2ContainerServlet extends SwordServlet { ContainerManagerImpl containerManagerImpl; @Inject StatementManagerImpl statementManagerImpl; - private ContainerManager cm; +// private ContainerManager cm; this field can be replaced by local variable private ContainerAPI api; - private StatementManager sm; +// private StatementManager sm; this field can be replaced by local variable private final ReentrantLock lock = new ReentrantLock(); @@ -28,13 +28,16 @@ public void init() throws ServletException { super.init(); // load the container manager implementation - this.cm = containerManagerImpl; +// this.cm = containerManagerImpl; + ContainerManager cm = containerManagerImpl; // load the statement manager implementation - this.sm = statementManagerImpl; +// this.sm = statementManagerImpl; + StatementManager sm = statementManagerImpl; // initialise the underlying servlet processor - this.api = new ContainerAPI(this.cm, this.sm, this.config); +// this.api = new ContainerAPI(this.cm, this.sm, this.config); + this.api = new ContainerAPI(cm, sm, this.config); } @Override From a4354f1f8666cd95671cc21cc218885064e21567 Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Thu, 8 Sep 2022 16:02:08 -0400 Subject: [PATCH 0002/1525] Revert "class field can be replaced by local variable" --- .../api/datadeposit/SWORDv2ContainerServlet.java | 13 +++++-------- 1 file changed, 5 insertions(+), 8 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/SWORDv2ContainerServlet.java b/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/SWORDv2ContainerServlet.java index 5e3e6937721..d8ba8eec4ca 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/SWORDv2ContainerServlet.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/SWORDv2ContainerServlet.java @@ -17,9 +17,9 @@ public class SWORDv2ContainerServlet extends SwordServlet { ContainerManagerImpl containerManagerImpl; @Inject StatementManagerImpl statementManagerImpl; -// private ContainerManager cm; this field can be replaced by local variable + private ContainerManager cm; private ContainerAPI api; -// private StatementManager sm; this field can be replaced by local variable + private StatementManager sm; private final ReentrantLock lock = new ReentrantLock(); @@ -28,16 +28,13 @@ public void init() throws ServletException { super.init(); // load the container manager implementation -// this.cm = containerManagerImpl; - ContainerManager cm = containerManagerImpl; + this.cm = containerManagerImpl; // load the statement manager implementation -// this.sm = statementManagerImpl; - StatementManager sm = statementManagerImpl; + this.sm = statementManagerImpl; // initialise the underlying servlet processor -// this.api = new ContainerAPI(this.cm, this.sm, 
this.config); - this.api = new ContainerAPI(cm, sm, this.config); + this.api = new ContainerAPI(this.cm, this.sm, this.config); } @Override From 5b3406551b2385abe4efa1b6320243d64de54030 Mon Sep 17 00:00:00 2001 From: Eryk Kulikowski Date: Thu, 29 Sep 2022 11:47:27 +0200 Subject: [PATCH 0003/1525] added api-direct-upload option for storage configurations --- doc/release-notes/4.20-release-notes.md | 11 +++++++++-- doc/sphinx-guides/source/installation/config.rst | 1 + .../harvard/iq/dataverse/dataaccess/StorageIO.java | 3 ++- .../java/edu/harvard/iq/dataverse/util/FileUtil.java | 3 ++- .../edu/harvard/iq/dataverse/util/SystemConfig.java | 3 ++- 5 files changed, 16 insertions(+), 5 deletions(-) diff --git a/doc/release-notes/4.20-release-notes.md b/doc/release-notes/4.20-release-notes.md index e29953db101..8044047134f 100644 --- a/doc/release-notes/4.20-release-notes.md +++ b/doc/release-notes/4.20-release-notes.md @@ -90,10 +90,16 @@ Also note that the :MaxFileUploadSizeInBytes property has a new option to provid ### Direct S3 Upload Changes -Direct upload to S3 is enabled per store by one new jvm option: +Direct upload to S3 in UI and API is enabled per store by one new jvm option: ./asadmin create-jvm-options "\-Ddataverse.files..upload-redirect=true" - + +That option makes direct upload by default in UI, in the API you can use either: direct or through Dataverse upload. Direct upload to S3 in API only is enabled per store by this new jvm option: + + ./asadmin create-jvm-options "\-Ddataverse.files..api-direct-upload=true" + +That option leaves through Dataverse upload by default in UI, but makes both: through Dataverse and direct uploads possible via API. + The existing :MaxFileUploadSizeInBytes property and ```dataverse.files..url-expiration-minutes``` jvm option for the same store also apply to direct upload. Direct upload via the Dataverse web interface is transparent to the user and handled automatically by the browser. Some minor differences in file upload exist: directly uploaded files are not unzipped and Dataverse does not scan their content to help in assigning a MIME type. Ingest of tabular files and metadata extraction from FITS files will occur, but can be turned off for files above a specified size limit through the new dataverse.files..ingestsizelimit jvm option. @@ -127,6 +133,7 @@ We made changes to the JSON Export in this release (Issue 6650, PR #6669). If yo - The JVM option dataverse.files.file.directory= controls where temporary files are stored (in the /temp subdir of the defined directory), independent of the location of any 'file' store defined above. - The JVM option dataverse.files..upload-redirect enables direct upload of files added to a dataset to the S3 bucket. (S3 stores only!) +- The JVM option dataverse.files..api-direct-upload enables direct upload of files added to a dataset to any storage. (Through API only and when the uploading tool has direct access to the storage used; i.e., uplad the file first and register it through API!) - The JVM option dataverse.files..MaxFileUploadSizeInBytes controls the maximum size of file uploads allowed for the given file store. - The JVM option dataverse.files..ingestsizelimit controls the maximum size of files for which ingest will be attempted, for the given file store. 
diff --git a/doc/sphinx-guides/source/installation/config.rst b/doc/sphinx-guides/source/installation/config.rst index f2de9d5702f..2b605ae8945 100644 --- a/doc/sphinx-guides/source/installation/config.rst +++ b/doc/sphinx-guides/source/installation/config.rst @@ -546,6 +546,7 @@ List of S3 Storage Options dataverse.files..bucket-name The bucket name. See above. (none) dataverse.files..download-redirect ``true``/``false`` Enable direct download or proxy through Dataverse. ``false`` dataverse.files..upload-redirect ``true``/``false`` Enable direct upload of files added to a dataset to the S3 store. ``false`` + dataverse.files..api-direct-upload ``true``/``false`` Enable direct upload of files added to a dataset through API only. ``false`` dataverse.files..ingestsizelimit Maximum size of directupload files that should be ingested (none) dataverse.files..url-expiration-minutes If direct uploads/downloads: time until links expire. Optional. 60 dataverse.files..min-part-size Multipart direct uploads will occur for files larger than this. Optional. ``1024**3`` diff --git a/src/main/java/edu/harvard/iq/dataverse/dataaccess/StorageIO.java b/src/main/java/edu/harvard/iq/dataverse/dataaccess/StorageIO.java index 90e4a54dbe8..8e2dd9fa961 100644 --- a/src/main/java/edu/harvard/iq/dataverse/dataaccess/StorageIO.java +++ b/src/main/java/edu/harvard/iq/dataverse/dataaccess/StorageIO.java @@ -605,7 +605,8 @@ public static String getDriverPrefix(String driverId) { } public static boolean isDirectUploadEnabled(String driverId) { - return Boolean.parseBoolean(System.getProperty("dataverse.files." + driverId + ".upload-redirect")); + return Boolean.parseBoolean(System.getProperty("dataverse.files." + driverId + ".upload-redirect")) || + Boolean.parseBoolean(System.getProperty("dataverse.files." + driverId + ".api-direct-upload")); } //Check that storageIdentifier is consistent with store's config diff --git a/src/main/java/edu/harvard/iq/dataverse/util/FileUtil.java b/src/main/java/edu/harvard/iq/dataverse/util/FileUtil.java index 339de904f9e..0a41da4f7dd 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/FileUtil.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/FileUtil.java @@ -1748,7 +1748,8 @@ public static boolean isPackageFile(DataFile dataFile) { public static S3AccessIO getS3AccessForDirectUpload(Dataset dataset) { String driverId = dataset.getEffectiveStorageDriverId(); - boolean directEnabled = Boolean.getBoolean("dataverse.files." + driverId + ".upload-redirect"); + boolean directEnabled = Boolean.parseBoolean(System.getProperty("dataverse.files." + driverId + ".upload-redirect")) || + Boolean.parseBoolean(System.getProperty("dataverse.files." + driverId + ".api-direct-upload")); //Should only be requested when it is allowed, but we'll log a warning otherwise if(!directEnabled) { logger.warning("Direct upload not supported for files in this dataset: " + dataset.getId()); diff --git a/src/main/java/edu/harvard/iq/dataverse/util/SystemConfig.java b/src/main/java/edu/harvard/iq/dataverse/util/SystemConfig.java index 7abd0d02065..4553a71a1d2 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/SystemConfig.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/SystemConfig.java @@ -1112,7 +1112,8 @@ public boolean isDatafileValidationOnPublishEnabled() { } public boolean directUploadEnabled(DvObjectContainer container) { - return Boolean.getBoolean("dataverse.files." 
+ container.getEffectiveStorageDriverId() + ".upload-redirect"); + // this method is used in UI only, therfore "dataverse.files." + driverId + ".api-direct-upload" is not used here + return Boolean.parseBoolean(System.getProperty("dataverse.files." + container.getEffectiveStorageDriverId() + ".upload-redirect")); } public String getDataCiteRestApiUrlString() { From 5db560e999454a0b974215c6d9bc8373d4595fc0 Mon Sep 17 00:00:00 2001 From: Eryk Kulikowski Date: Thu, 29 Sep 2022 13:43:33 +0200 Subject: [PATCH 0004/1525] improvements in the documentation --- doc/release-notes/4.20-release-notes.md | 8 ++++---- doc/sphinx-guides/source/installation/config.rst | 4 ++-- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/doc/release-notes/4.20-release-notes.md b/doc/release-notes/4.20-release-notes.md index 8044047134f..79037d8cd8c 100644 --- a/doc/release-notes/4.20-release-notes.md +++ b/doc/release-notes/4.20-release-notes.md @@ -94,11 +94,11 @@ Direct upload to S3 in UI and API is enabled per store by one new jvm option: ./asadmin create-jvm-options "\-Ddataverse.files..upload-redirect=true" -That option makes direct upload by default in UI, in the API you can use either: direct or through Dataverse upload. Direct upload to S3 in API only is enabled per store by this new jvm option: +This option makes direct upload the default in the UI. In the API, you can use either: direct upload or upload via Dataverse upload. Direct upload to S3 in API only is enabled per store by this new jvm option: ./asadmin create-jvm-options "\-Ddataverse.files..api-direct-upload=true" -That option leaves through Dataverse upload by default in UI, but makes both: through Dataverse and direct uploads possible via API. +That option leaves via Dataverse upload by default in UI, but makes both: uploads via Dataverse and direct uploads possible via API. The existing :MaxFileUploadSizeInBytes property and ```dataverse.files..url-expiration-minutes``` jvm option for the same store also apply to direct upload. @@ -132,8 +132,8 @@ We made changes to the JSON Export in this release (Issue 6650, PR #6669). If yo ## New JVM Options for file storage drivers - The JVM option dataverse.files.file.directory= controls where temporary files are stored (in the /temp subdir of the defined directory), independent of the location of any 'file' store defined above. -- The JVM option dataverse.files..upload-redirect enables direct upload of files added to a dataset to the S3 bucket. (S3 stores only!) -- The JVM option dataverse.files..api-direct-upload enables direct upload of files added to a dataset to any storage. (Through API only and when the uploading tool has direct access to the storage used; i.e., uplad the file first and register it through API!) +- The JVM option dataverse.files..upload-redirect enables direct upload of files added to a dataset in the S3 bucket. (S3 stores only!) +- The JVM option dataverse.files..api-direct-upload enables direct upload of files added to a dataset in any storage. (Via API only and when the uploading tool has direct access to the relevant storage used; i.e., upload the file first and register it via API!) - The JVM option dataverse.files..MaxFileUploadSizeInBytes controls the maximum size of file uploads allowed for the given file store. - The JVM option dataverse.files..ingestsizelimit controls the maximum size of files for which ingest will be attempted, for the given file store. 
diff --git a/doc/sphinx-guides/source/installation/config.rst b/doc/sphinx-guides/source/installation/config.rst index 2b605ae8945..3245aeccfaf 100644 --- a/doc/sphinx-guides/source/installation/config.rst +++ b/doc/sphinx-guides/source/installation/config.rst @@ -545,8 +545,8 @@ List of S3 Storage Options dataverse.files..label **Required** label to be shown in the UI for this storage (none) dataverse.files..bucket-name The bucket name. See above. (none) dataverse.files..download-redirect ``true``/``false`` Enable direct download or proxy through Dataverse. ``false`` - dataverse.files..upload-redirect ``true``/``false`` Enable direct upload of files added to a dataset to the S3 store. ``false`` - dataverse.files..api-direct-upload ``true``/``false`` Enable direct upload of files added to a dataset through API only. ``false`` + dataverse.files..upload-redirect ``true``/``false`` Enable direct upload of files added to a dataset in the S3 store. ``false`` + dataverse.files..api-direct-upload ``true``/``false`` Enable direct upload of files added to a dataset via API only. ``false`` dataverse.files..ingestsizelimit Maximum size of directupload files that should be ingested (none) dataverse.files..url-expiration-minutes If direct uploads/downloads: time until links expire. Optional. 60 dataverse.files..min-part-size Multipart direct uploads will occur for files larger than this. Optional. ``1024**3`` From c7cfb533b261b7e36afd78b2061d48d2f55c4736 Mon Sep 17 00:00:00 2001 From: bencomp Date: Mon, 3 Oct 2022 01:17:42 +0200 Subject: [PATCH 0005/1525] Remove unused GPL-licensed code For unknown reasons, in 2009 several files from the JDK were copied into the Dataverse codebase, instead of referenced. It appears that these classes weren't really used. --- .../spi/FileMetadataExtractorSpi.java | 18 - .../ingest/plugin/spi/DigraphNode.java | 188 ---- .../plugin/spi/IngestServiceProvider.java | 25 +- .../plugin/spi/PartiallyOrderedSet.java | 241 ----- .../plugin/spi/RegisterableService.java | 90 -- .../ingest/plugin/spi/ServiceRegistry.java | 861 ------------------ 6 files changed, 1 insertion(+), 1422 deletions(-) delete mode 100644 src/main/java/edu/harvard/iq/dataverse/ingest/plugin/spi/DigraphNode.java delete mode 100644 src/main/java/edu/harvard/iq/dataverse/ingest/plugin/spi/PartiallyOrderedSet.java delete mode 100644 src/main/java/edu/harvard/iq/dataverse/ingest/plugin/spi/RegisterableService.java delete mode 100644 src/main/java/edu/harvard/iq/dataverse/ingest/plugin/spi/ServiceRegistry.java diff --git a/src/main/java/edu/harvard/iq/dataverse/ingest/metadataextraction/spi/FileMetadataExtractorSpi.java b/src/main/java/edu/harvard/iq/dataverse/ingest/metadataextraction/spi/FileMetadataExtractorSpi.java index ab8f610cb06..a30dfafe67f 100644 --- a/src/main/java/edu/harvard/iq/dataverse/ingest/metadataextraction/spi/FileMetadataExtractorSpi.java +++ b/src/main/java/edu/harvard/iq/dataverse/ingest/metadataextraction/spi/FileMetadataExtractorSpi.java @@ -10,9 +10,7 @@ import java.util.logging.*; import java.io.*; -import edu.harvard.iq.dataverse.ingest.plugin.spi.RegisterableService; import edu.harvard.iq.dataverse.ingest.plugin.spi.IngestServiceProvider; -import edu.harvard.iq.dataverse.ingest.plugin.spi.ServiceRegistry; import java.nio.MappedByteBuffer; import java.util.Locale; @@ -44,22 +42,6 @@ public FileMetadataExtractorSpi(String vendorName, String version) { this.version = version; } - public void onRegistration(ServiceRegistry registry, - Class category) {} - - - public void 
onDeregistration(ServiceRegistry registry, - Class category) {} - - public String getVersion() { - return version; - } - - public String getVendorName() { - return vendorName; - } - - public abstract String getDescription(Locale locale); protected String[] names = null; diff --git a/src/main/java/edu/harvard/iq/dataverse/ingest/plugin/spi/DigraphNode.java b/src/main/java/edu/harvard/iq/dataverse/ingest/plugin/spi/DigraphNode.java deleted file mode 100644 index 4db48b5c06a..00000000000 --- a/src/main/java/edu/harvard/iq/dataverse/ingest/plugin/spi/DigraphNode.java +++ /dev/null @@ -1,188 +0,0 @@ -/* - Copyright (C) 2005-2012, by the President and Fellows of Harvard College. - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. - - Dataverse Network - A web application to share, preserve and analyze research data. - Developed at the Institute for Quantitative Social Science, Harvard University. - Version 3.0. -*/ - -package edu.harvard.iq.dataverse.ingest.plugin.spi; - -// This file was Taken out from openjdk-6-src-b16-24_apr_2009.tar.gz -// http://download.java.net/openjdk/jdk6/promoted/b16/openjdk-6-src-b16-24_apr_2009.tar.gz -// downloaded: 2009-05-07 - - -/* - * Copyright 2000 Sun Microsystems, Inc. All Rights Reserved. - * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. - * - * This code is free software; you can redistribute it and/or modify it - * under the terms of the GNU General Public License version 2 only, as - * published by the Free Software Foundation. Sun designates this - * particular file as subject to the "Classpath" exception as provided - * by Sun in the LICENSE file that accompanied this code. - * - * This code is distributed in the hope that it will be useful, but WITHOUT - * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or - * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License - * version 2 for more details (a copy is included in the LICENSE file that - * accompanied this code). - * - * You should have received a copy of the GNU General Public License version - * 2 along with this work; if not, write to the Free Software Foundation, - * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. - * - * Please contact Sun Microsystems, Inc., 4150 Network Circle, Santa Clara, - * CA 95054 USA or visit www.sun.com if you need additional information or - * have any questions. - */ - -//package javax.imageio.spi; - -import java.io.Serializable; -import java.util.HashSet; -import java.util.Iterator; -import java.util.Set; - -/** - * A node in a directed graph. In addition to an arbitrary - * Object containing user data associated with the node, - * each node maintains a Sets of nodes which are pointed - * to by the current node (available from getOutNodes). - * The in-degree of the node (that is, number of nodes that point to - * the current node) may be queried. - * - */ -class DigraphNode implements Cloneable, Serializable { - - /** The data associated with this node. 
*/ - protected Object data; - - /** - * A Set of neighboring nodes pointed to by this - * node. - */ - protected Set outNodes = new HashSet(); - - /** The in-degree of the node. */ - protected int inDegree = 0; - - /** - * A Set of neighboring nodes that point to this - * node. - */ - private Set inNodes = new HashSet(); - - public DigraphNode(Object data) { - this.data = data; - } - - /** Returns the Object referenced by this node. */ - public Object getData() { - return data; - } - - /** - * Returns an Iterator containing the nodes pointed - * to by this node. - */ - public Iterator getOutNodes() { - return outNodes.iterator(); - } - - /** - * Adds a directed edge to the graph. The outNodes list of this - * node is updated and the in-degree of the other node is incremented. - * - * @param node a DigraphNode. - * - * @return true if the node was not previously the - * target of an edge. - */ - public boolean addEdge(DigraphNode node) { - if (outNodes.contains(node)) { - return false; - } - - outNodes.add(node); - node.inNodes.add(this); - node.incrementInDegree(); - return true; - } - - /** - * Returns true if an edge exists between this node - * and the given node. - * - * @param node a DigraphNode. - * - * @return true if the node is the target of an edge. - */ - public boolean hasEdge(DigraphNode node) { - return outNodes.contains(node); - } - - /** - * Removes a directed edge from the graph. The outNodes list of this - * node is updated and the in-degree of the other node is decremented. - * - * @return true if the node was previously the target - * of an edge. - */ - public boolean removeEdge(DigraphNode node) { - if (!outNodes.contains(node)) { - return false; - } - - outNodes.remove(node); - node.inNodes.remove(this); - node.decrementInDegree(); - return true; - } - - /** - * Removes this node from the graph, updating neighboring nodes - * appropriately. - */ - public void dispose() { - Object[] inNodesArray = inNodes.toArray(); - for(int i=0; iServiceRegistry. - * - * @param registry the ServiceRegistry instance. - * @param category a Class object that indicatges - * its registry category under which this object has been registered. - * category. - */ - public void onRegistration(ServiceRegistry registry, - Class category) {} - - /** - * A callback whenever this Spi class is deregistered from - * a ServiceRegistry. - * - * @param registry the ServiceRegistry instance. - * @param category a Class object that indicatges - * its registry category from which this object is being de-registered. - */ - public void onDeregistration(ServiceRegistry registry, - Class category) {} /** * Gets the value of the version field. * diff --git a/src/main/java/edu/harvard/iq/dataverse/ingest/plugin/spi/PartiallyOrderedSet.java b/src/main/java/edu/harvard/iq/dataverse/ingest/plugin/spi/PartiallyOrderedSet.java deleted file mode 100644 index 87f4f57cdb6..00000000000 --- a/src/main/java/edu/harvard/iq/dataverse/ingest/plugin/spi/PartiallyOrderedSet.java +++ /dev/null @@ -1,241 +0,0 @@ -/* - Copyright (C) 2005-2012, by the President and Fellows of Harvard College. - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
- See the License for the specific language governing permissions and - limitations under the License. - - Dataverse Network - A web application to share, preserve and analyze research data. - Developed at the Institute for Quantitative Social Science, Harvard University. - Version 3.0. -*/ - -package edu.harvard.iq.dataverse.ingest.plugin.spi; - -// This file was Taken out from openjdk-6-src-b16-24_apr_2009.tar.gz -// http://download.java.net/openjdk/jdk6/promoted/b16/openjdk-6-src-b16-24_apr_2009.tar.gz -// downloaded: 2009-05-07 - - -/* - * Copyright 2000 Sun Microsystems, Inc. All Rights Reserved. - * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. - * - * This code is free software; you can redistribute it and/or modify it - * under the terms of the GNU General Public License version 2 only, as - * published by the Free Software Foundation. Sun designates this - * particular file as subject to the "Classpath" exception as provided - * by Sun in the LICENSE file that accompanied this code. - * - * This code is distributed in the hope that it will be useful, but WITHOUT - * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or - * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License - * version 2 for more details (a copy is included in the LICENSE file that - * accompanied this code). - * - * You should have received a copy of the GNU General Public License version - * 2 along with this work; if not, write to the Free Software Foundation, - * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. - * - * Please contact Sun Microsystems, Inc., 4150 Network Circle, Santa Clara, - * CA 95054 USA or visit www.sun.com if you need additional information or - * have any questions. - */ - -//package javax.imageio.spi; - -import java.util.AbstractSet; -import java.util.HashMap; -import java.util.Iterator; -import java.util.LinkedList; -import java.util.Map; -import java.util.Set; - -/** - * A set of Objects with pairwise orderings between them. - * The iterator method provides the elements in - * topologically sorted order. Elements participating in a cycle - * are not returned. - * - * Unlike the SortedSet and SortedMap - * interfaces, which require their elements to implement the - * Comparable interface, this class receives ordering - * information via its setOrdering and - * unsetPreference methods. This difference is due to - * the fact that the relevant ordering between elements is unlikely to - * be inherent in the elements themselves; rather, it is set - * dynamically accoring to application policy. For example, in a - * service provider registry situation, an application might allow the - * user to set a preference order for service provider objects - * supplied by a trusted vendor over those supplied by another. - * - */ -class PartiallyOrderedSet extends AbstractSet { - - // The topological sort (roughly) follows the algorithm described in - // Horowitz and Sahni, _Fundamentals of Data Structures_ (1976), - // p. 315. - - // Maps Objects to DigraphNodes that contain them - private Map poNodes = new HashMap(); - - // The set of Objects - private Set nodes = poNodes.keySet(); - - /** - * Constructs a PartiallyOrderedSet. 
- */ - public PartiallyOrderedSet() {} - - public int size() { - return nodes.size(); - } - - public boolean contains(Object o) { - return nodes.contains(o); - } - - /** - * Returns an iterator over the elements contained in this - * collection, with an ordering that respects the orderings set - * by the setOrdering method. - */ - public Iterator iterator() { - return new PartialOrderIterator(poNodes.values().iterator()); - } - - /** - * Adds an Object to this - * PartiallyOrderedSet. - */ - public boolean add(Object o) { - if (nodes.contains(o)) { - return false; - } - - DigraphNode node = new DigraphNode(o); - poNodes.put(o, node); - return true; - } - - /** - * Removes an Object from this - * PartiallyOrderedSet. - */ - public boolean remove(Object o) { - DigraphNode node = (DigraphNode)poNodes.get(o); - if (node == null) { - return false; - } - - poNodes.remove(o); - node.dispose(); - return true; - } - - public void clear() { - poNodes.clear(); - } - - /** - * Sets an ordering between two nodes. When an iterator is - * requested, the first node will appear earlier in the - * sequence than the second node. If a prior ordering existed - * between the nodes in the opposite order, it is removed. - * - * @return true if no prior ordering existed - * between the nodes, falseotherwise. - */ - public boolean setOrdering(Object first, Object second) { - DigraphNode firstPONode = - (DigraphNode)poNodes.get(first); - DigraphNode secondPONode = - (DigraphNode)poNodes.get(second); - - secondPONode.removeEdge(firstPONode); - return firstPONode.addEdge(secondPONode); - } - - /** - * Removes any ordering between two nodes. - * - * @return true if a prior prefence existed between the nodes. - */ - public boolean unsetOrdering(Object first, Object second) { - DigraphNode firstPONode = - (DigraphNode)poNodes.get(first); - DigraphNode secondPONode = - (DigraphNode)poNodes.get(second); - - return firstPONode.removeEdge(secondPONode) || - secondPONode.removeEdge(firstPONode); - } - - /** - * Returns true if an ordering exists between two - * nodes. 
- */ - public boolean hasOrdering(Object preferred, Object other) { - DigraphNode preferredPONode = - (DigraphNode)poNodes.get(preferred); - DigraphNode otherPONode = - (DigraphNode)poNodes.get(other); - - return preferredPONode.hasEdge(otherPONode); - } -} - -class PartialOrderIterator implements Iterator { - - LinkedList zeroList = new LinkedList(); - Map inDegrees = new HashMap(); // DigraphNode -> Integer - - public PartialOrderIterator(Iterator iter) { - // Initialize scratch in-degree values, zero list - while (iter.hasNext()) { - DigraphNode node = (DigraphNode)iter.next(); - int inDegree = node.getInDegree(); - inDegrees.put(node, new Integer(inDegree)); - - // Add nodes with zero in-degree to the zero list - if (inDegree == 0) { - zeroList.add(node); - } - } - } - - public boolean hasNext() { - return !zeroList.isEmpty(); - } - - public Object next() { - DigraphNode first = (DigraphNode)zeroList.removeFirst(); - - // For each out node of the output node, decrement its in-degree - Iterator outNodes = first.getOutNodes(); - while (outNodes.hasNext()) { - DigraphNode node = (DigraphNode)outNodes.next(); - int inDegree = ((Integer)inDegrees.get(node)).intValue() - 1; - inDegrees.put(node, new Integer(inDegree)); - - // If the in-degree has fallen to 0, place the node on the list - if (inDegree == 0) { - zeroList.add(node); - } - } - - return first.getData(); - } - - public void remove() { - throw new UnsupportedOperationException(); - } -} diff --git a/src/main/java/edu/harvard/iq/dataverse/ingest/plugin/spi/RegisterableService.java b/src/main/java/edu/harvard/iq/dataverse/ingest/plugin/spi/RegisterableService.java deleted file mode 100644 index d3609b1e4b9..00000000000 --- a/src/main/java/edu/harvard/iq/dataverse/ingest/plugin/spi/RegisterableService.java +++ /dev/null @@ -1,90 +0,0 @@ -/* - Copyright (C) 2005-2012, by the President and Fellows of Harvard College. - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. - - Dataverse Network - A web application to share, preserve and analyze research data. - Developed at the Institute for Quantitative Social Science, Harvard University. - Version 3.0. -*/ - -package edu.harvard.iq.dataverse.ingest.plugin.spi; - -// This file was Taken out from openjdk-6-src-b16-24_apr_2009.tar.gz -// http://download.java.net/openjdk/jdk6/promoted/b16/openjdk-6-src-b16-24_apr_2009.tar.gz -// downloaded: 2009-05-07 - - -/* - * Copyright 2000-2004 Sun Microsystems, Inc. All Rights Reserved. - * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. - * - * This code is free software; you can redistribute it and/or modify it - * under the terms of the GNU General Public License version 2 only, as - * published by the Free Software Foundation. Sun designates this - * particular file as subject to the "Classpath" exception as provided - * by Sun in the LICENSE file that accompanied this code. 
- * - * This code is distributed in the hope that it will be useful, but WITHOUT - * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or - * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License - * version 2 for more details (a copy is included in the LICENSE file that - * accompanied this code). - * - * You should have received a copy of the GNU General Public License version - * 2 along with this work; if not, write to the Free Software Foundation, - * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. - * - * Please contact Sun Microsystems, Inc., 4150 Network Circle, Santa Clara, - * CA 95054 USA or visit www.sun.com if you need additional information or - * have any questions. - */ - - -/** - * An optional interface that may be provided by service provider - * objects that will be registered with a - * ServiceRegistry. If this interface is present, - * notification of registration and deregistration will be performed. - * - * @see ServiceRegistry - * - */ -public interface RegisterableService { - - /** - * Called when an object implementing this interface is added to - * the given category of the given - * registry. The object may already be registered - * under another category or categories. - * - * @param registry a ServiceRegistry where this - * object has been registered. - * @param category a Class object indicating the - * registry category under which this object has been registered. - */ - void onRegistration(ServiceRegistry registry, Class category); - - /** - * Called when an object implementing this interface is removed - * from the given category of the given - * registry. The object may still be registered - * under another category or categories. - * - * @param registry a ServiceRegistry from which this - * object is being (wholly or partially) deregistered. - * @param category a Class object indicating the - * registry category from which this object is being deregistered. - */ - void onDeregistration(ServiceRegistry registry, Class category); -} diff --git a/src/main/java/edu/harvard/iq/dataverse/ingest/plugin/spi/ServiceRegistry.java b/src/main/java/edu/harvard/iq/dataverse/ingest/plugin/spi/ServiceRegistry.java deleted file mode 100644 index 1794adb5de2..00000000000 --- a/src/main/java/edu/harvard/iq/dataverse/ingest/plugin/spi/ServiceRegistry.java +++ /dev/null @@ -1,861 +0,0 @@ -/* - Copyright (C) 2005-2012, by the President and Fellows of Harvard College. - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. - - Dataverse Network - A web application to share, preserve and analyze research data. - Developed at the Institute for Quantitative Social Science, Harvard University. - Version 3.0. -*/ -package edu.harvard.iq.dataverse.ingest.plugin.spi; - - - -// This file was Taken out from openjdk-6-src-b16-24_apr_2009.tar.gz -// http://download.java.net/openjdk/jdk6/promoted/b16/openjdk-6-src-b16-24_apr_2009.tar.gz -// downloaded: 2009-05-07 - - -/* - * Copyright 2000-2007 Sun Microsystems, Inc. All Rights Reserved. 
- * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. - * - * This code is free software; you can redistribute it and/or modify it - * under the terms of the GNU General Public License version 2 only, as - * published by the Free Software Foundation. Sun designates this - * particular file as subject to the "Classpath" exception as provided - * by Sun in the LICENSE file that accompanied this code. - * - * This code is distributed in the hope that it will be useful, but WITHOUT - * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or - * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License - * version 2 for more details (a copy is included in the LICENSE file that - * accompanied this code). - * - * You should have received a copy of the GNU General Public License version - * 2 along with this work; if not, write to the Free Software Foundation, - * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. - * - * Please contact Sun Microsystems, Inc., 4150 Network Circle, Santa Clara, - * CA 95054 USA or visit www.sun.com if you need additional information or - * have any questions. - */ - -//package javax.imageio.spi; - -import java.io.File; -import java.util.ArrayList; -import java.util.HashMap; -import java.util.Iterator; -import java.util.List; -import java.util.Map; -import java.util.NoSuchElementException; -import java.util.Set; -import java.util.ServiceLoader; - -/** - * A registry for service provider instances. - * - *

A service is a well-known set of interfaces and (usually - * abstract) classes. A service provider is a specific - * implementation of a service. The classes in a provider typically - * implement the interface or subclass the class defined by the - * service itself. - * - *

Service providers are stored in one or more categories, - * each of which is defined by a class of interface (described by a - * Class object) that all of its members must implement. - * The set of categories may be changed dynamically. - * - *

Only a single instance of a given leaf class (that is, the - * actual class returned by getClass(), as opposed to any - * inherited classes or interfaces) may be registered. That is, - * suppose that the - * com.mycompany.mypkg.GreenServiceProvider class - * implements the com.mycompany.mypkg.MyService - * interface. If a GreenServiceProvider instance is - * registered, it will be stored in the category defined by the - * MyService class. If a new instance of - * GreenServiceProvider is registered, it will replace - * the previous instance. In practice, service provider objects are - * usually singletons so this behavior is appropriate. - * - *

To declare a service provider, a services - * subdirectory is placed within the META-INF directory - * that is present in every JAR file. This directory contains a file - * for each service provider interface that has one or more - * implementation classes present in the JAR file. For example, if - * the JAR file contained a class named - * com.mycompany.mypkg.MyServiceImpl which implements the - * javax.someapi.SomeService interface, the JAR file - * would contain a file named:

- * META-INF/services/javax.someapi.SomeService 
- * - * containing the line: - * - *
- * com.mycompany.mypkg.MyService
- * 
- * - *

The service provider classes should be to be lightweight and - * quick to load. Implementations of these interfaces should avoid - * complex dependencies on other classes and on native code. The usual - * pattern for more complex services is to register a lightweight - * proxy for the heavyweight service. - * - *

An application may customize the contents of a registry as it - * sees fit, so long as it has the appropriate runtime permission. - * - *

For more details on declaring service providers, and the JAR - * format in general, see the - * JAR File Specification. - * - * @see RegisterableService - * - */ -public class ServiceRegistry { - - // Class -> Registry - private Map, SubRegistry> categoryMap = new HashMap<>(); - - /** - * Constructs a ServiceRegistry instance with a - * set of categories taken from the categories - * argument. - * - * @param categories an Iterator containing - * Class objects to be used to define categories. - * - * @exception IllegalArgumentException if - * categories is null. - */ - public ServiceRegistry(Iterator> categories) { - if (categories == null) { - throw new IllegalArgumentException("categories == null!"); - } - while (categories.hasNext()) { - Class category = categories.next(); - SubRegistry reg = new SubRegistry(this, category); - categoryMap.put(category, reg); - } - } - - // The following two methods expose functionality from - // sun.misc.Service. If that class is made public, they may be - // removed. - // - // The sun.misc.ServiceConfigurationError class may also be - // exposed, in which case the references to 'an - // Error' below should be changed to 'a - // ServiceConfigurationError'. - - /** - * Searches for implementations of a particular service class - * using the given class loader. - * - *

This method transforms the name of the given service class - * into a provider-configuration filename as described in the - * class comment and then uses the getResources - * method of the given class loader to find all available files - * with that name. These files are then read and parsed to - * produce a list of provider-class names. The iterator that is - * returned uses the given class loader to look up and then - * instantiate each element of the list. - * - *

Because it is possible for extensions to be installed into - * a running Java virtual machine, this method may return - * different results each time it is invoked. - * - * @param providerClass a Classobject indicating the - * class or interface of the service providers being detected. - * - * @param loader the class loader to be used to load - * provider-configuration files and instantiate provider classes, - * or null if the system class loader (or, failing that - * the bootstrap class loader) is to be used. - * - * @return An Iterator that yields provider objects - * for the given service, in some arbitrary order. The iterator - * will throw an Error if a provider-configuration - * file violates the specified format or if a provider class - * cannot be found and instantiated. - * - * @exception IllegalArgumentException if - * providerClass is null. - */ - public static Iterator lookupProviders(Class providerClass, - ClassLoader loader) - { - if (providerClass == null) { - throw new IllegalArgumentException("providerClass == null!"); - } - return ServiceLoader.load(providerClass, loader).iterator(); - } - - /** - * Locates and incrementally instantiates the available providers - * of a given service using the context class loader. This - * convenience method is equivalent to: - * - *

-     *   ClassLoader cl = Thread.currentThread().getContextClassLoader();
-     *   return Service.providers(service, cl);
-     * 
- * - * @param providerClass a Classobject indicating the - * class or interface of the service providers being detected. - * - * @return An Iterator that yields provider objects - * for the given service, in some arbitrary order. The iterator - * will throw an Error if a provider-configuration - * file violates the specified format or if a provider class - * cannot be found and instantiated. - * - * @exception IllegalArgumentException if - * providerClass is null. - */ - public static Iterator lookupProviders(Class providerClass) { - if (providerClass == null) { - throw new IllegalArgumentException("providerClass == null!"); - } - return ServiceLoader.load(providerClass).iterator(); - } - - /** - * Returns an Iterator of Class objects - * indicating the current set of categories. The iterator will be - * empty if no categories exist. - * - * @return an Iterator containing - * Classobjects. - */ - public Iterator> getCategories() { - Set> keySet = categoryMap.keySet(); - return keySet.iterator(); - } - - /** - * Returns an Iterator containing the subregistries to which the - * provider belongs. - */ - private Iterator getSubRegistries(Object provider) { - List l = new ArrayList<>(); - Iterator> iter = categoryMap.keySet().iterator(); - while (iter.hasNext()) { - Class c = iter.next(); - if (c.isAssignableFrom(provider.getClass())) { - l.add(categoryMap.get(c)); - } - } - return l.iterator(); - } - - /** - * Adds a service provider object to the registry. The provider - * is associated with the given category. - * - *

If provider implements the - * RegisterableService interface, its - * onRegistration method will be called. Its - * onDeregistration method will be called each time - * it is deregistered from a category, for example if a - * category is removed or the registry is garbage collected. - * - * @param provider the service provide object to be registered. - * @param category the category under which to register the - * provider. - * - * @return true if no provider of the same class was previously - * registered in the same category category. - * - * @exception IllegalArgumentException if provider is - * null. - * @exception IllegalArgumentException if there is no category - * corresponding to category. - * @exception ClassCastException if provider does not implement - * the Class defined by category. - */ - public boolean registerServiceProvider(T provider, - Class category) { - if (provider == null) { - throw new IllegalArgumentException("provider == null!"); - } - SubRegistry reg = categoryMap.get(category); - if (reg == null) { - throw new IllegalArgumentException("category unknown!"); - } - if (!category.isAssignableFrom(provider.getClass())) { - throw new ClassCastException(); - } - - return reg.registerServiceProvider(provider); - } - - /** - * Adds a service provider object to the registry. The provider - * is associated within each category present in the registry - * whose Class it implements. - * - *

If provider implements the - * RegisterableService interface, its - * onRegistration method will be called once for each - * category it is registered under. Its - * onDeregistration method will be called each time - * it is deregistered from a category or when the registry is - * finalized. - * - * @param provider the service provider object to be registered. - * - * @exception IllegalArgumentException if - * provider is null. - */ - public void registerServiceProvider(Object provider) { - if (provider == null) { - throw new IllegalArgumentException("provider == null!"); - } - Iterator regs = getSubRegistries(provider); - while (regs.hasNext()) { - SubRegistry reg = regs.next(); - reg.registerServiceProvider(provider); - } - } - - /** - * Adds a set of service provider objects, taken from an - * Iterator to the registry. Each provider is - * associated within each category present in the registry whose - * Class it implements. - * - *

For each entry of providers that implements - * the RegisterableService interface, its - * onRegistration method will be called once for each - * category it is registered under. Its - * onDeregistration method will be called each time - * it is deregistered from a category or when the registry is - * finalized. - * - * @param providers an Iterator containing service provider - * objects to be registered. - * - * @exception IllegalArgumentException if providers - * is null or contains a null entry. - */ - public void registerServiceProviders(Iterator providers) { - if (providers == null) { - throw new IllegalArgumentException("provider == null!"); - } - while (providers.hasNext()) { - registerServiceProvider(providers.next()); - } - } - - /** - * Removes a service provider object from the given category. If - * the provider was not previously registered, nothing happens and - * false is returned. Otherwise, true - * is returned. If an object of the same class as - * provider but not equal (using ==) to - * provider is registered, it will not be - * deregistered. - * - *

If provider implements the - * RegisterableService interface, its - * onDeregistration method will be called. - * - * @param provider the service provider object to be deregistered. - * @param category the category from which to deregister the - * provider. - * - * @return true if the provider was previously - * registered in the same category category, - * false otherwise. - * - * @exception IllegalArgumentException if provider is - * null. - * @exception IllegalArgumentException if there is no category - * corresponding to category. - * @exception ClassCastException if provider does not implement - * the class defined by category. - */ - public boolean deregisterServiceProvider(T provider, - Class category) { - if (provider == null) { - throw new IllegalArgumentException("provider == null!"); - } - SubRegistry reg = categoryMap.get(category); - if (reg == null) { - throw new IllegalArgumentException("category unknown!"); - } - if (!category.isAssignableFrom(provider.getClass())) { - throw new ClassCastException(); - } - return reg.deregisterServiceProvider(provider); - } - - /** - * Removes a service provider object from all categories that - * contain it. - * - * @param provider the service provider object to be deregistered. - * - * @exception IllegalArgumentException if provider is - * null. - */ - public void deregisterServiceProvider(Object provider) { - if (provider == null) { - throw new IllegalArgumentException("provider == null!"); - } - Iterator regs = getSubRegistries(provider); - while (regs.hasNext()) { - SubRegistry reg = regs.next(); - reg.deregisterServiceProvider(provider); - } - } - - /** - * Returns true if provider is currently - * registered. - * - * @param provider the service provider object to be queried. - * - * @return true if the given provider has been - * registered. - * - * @exception IllegalArgumentException if provider is - * null. - */ - public boolean contains(Object provider) { - if (provider == null) { - throw new IllegalArgumentException("provider == null!"); - } - Iterator regs = getSubRegistries(provider); - while (regs.hasNext()) { - SubRegistry reg = regs.next(); - if (reg.contains(provider)) { - return true; - } - } - - return false; - } - - /** - * Returns an Iterator containing all registered - * service providers in the given category. If - * useOrdering is false, the iterator - * will return all of the server provider objects in an arbitrary - * order. Otherwise, the ordering will respect any pairwise - * orderings that have been set. If the graph of pairwise - * orderings contains cycles, any providers that belong to a cycle - * will not be returned. - * - * @param category the category to be retrieved from. - * @param useOrdering true if pairwise orderings - * should be taken account in ordering the returned objects. - * - * @return an Iterator containing service provider - * objects from the given category, possibly in order. - * - * @exception IllegalArgumentException if there is no category - * corresponding to category. - */ - public Iterator getServiceProviders(Class category, - boolean useOrdering) { - SubRegistry reg = categoryMap.get(category); - if (reg == null) { - throw new IllegalArgumentException("category unknown!"); - } - return reg.getServiceProviders(useOrdering); - } - - /** - * A simple filter interface used by - * ServiceRegistry.getServiceProviders to select - * providers matching an arbitrary criterion. 
Classes that - * implement this interface should be defined in order to make use - * of the getServiceProviders method of - * ServiceRegistry that takes a Filter. - * - * @see ServiceRegistry#getServiceProviders(Class, ServiceRegistry.Filter, boolean) - */ - public interface Filter { - - /** - * Returns true if the given - * provider object matches the criterion defined - * by this Filter. - * - * @param provider a service provider Object. - * - * @return true if the provider matches the criterion. - */ - boolean filter(Object provider); - } - - /** - * Returns an Iterator containing service provider - * objects within a given category that satisfy a criterion - * imposed by the supplied ServiceRegistry.Filter - * object's filter method. - * - *

The useOrdering argument controls the - * ordering of the results using the same rules as - * getServiceProviders(Class, boolean). - * - * @param category the category to be retrieved from. - * @param filter an instance of ServiceRegistry.Filter - * whose filter method will be invoked. - * @param useOrdering true if pairwise orderings - * should be taken account in ordering the returned objects. - * - * @return an Iterator containing service provider - * objects from the given category, possibly in order. - * - * @exception IllegalArgumentException if there is no category - * corresponding to category. - */ - public Iterator getServiceProviders(Class category, - Filter filter, - boolean useOrdering) { - SubRegistry reg = categoryMap.get(category); - if (reg == null) { - throw new IllegalArgumentException("category unknown!"); - } - Iterator iter = getServiceProviders(category, useOrdering); - return new FilterIterator(iter, filter); - } - - /** - * Returns the currently registered service provider object that - * is of the given class type. At most one object of a given - * class is allowed to be registered at any given time. If no - * registered object has the desired class type, null - * is returned. - * - * @param providerClass the Class of the desired - * service provider object. - * - * @return a currently registered service provider object with the - * desired Classtype, or null is none is - * present. - * - * @exception IllegalArgumentException if providerClass is - * null. - */ - public T getServiceProviderByClass(Class providerClass) { - if (providerClass == null) { - throw new IllegalArgumentException("providerClass == null!"); - } - for (Class c : categoryMap.keySet()) { - if (c.isAssignableFrom(providerClass)) { - SubRegistry reg = (SubRegistry)categoryMap.get(c); - T provider = reg.getServiceProviderByClass(providerClass); - if (provider != null) { - return provider; - } - } - } - return null; - } - - /** - * Sets a pairwise ordering between two service provider objects - * within a given category. If one or both objects are not - * currently registered within the given category, or if the - * desired ordering is already set, nothing happens and - * false is returned. If the providers previously - * were ordered in the reverse direction, that ordering is - * removed. - * - *

The ordering will be used by the - * getServiceProviders methods when their - * useOrdering argument is true. - * - * @param category a Class object indicating the - * category under which the preference is to be established. - * @param firstProvider the preferred provider. - * @param secondProvider the provider to which - * firstProvider is preferred. - * - * @return true if a previously unset ordering - * was established. - * - * @exception IllegalArgumentException if either provider is - * null or they are the same object. - * @exception IllegalArgumentException if there is no category - * corresponding to category. - */ - public boolean setOrdering(Class category, - T firstProvider, - T secondProvider) { - if (firstProvider == null || secondProvider == null) { - throw new IllegalArgumentException("provider is null!"); - } - if (firstProvider == secondProvider) { - throw new IllegalArgumentException("providers are the same!"); - } - SubRegistry reg = (SubRegistry)categoryMap.get(category); - if (reg == null) { - throw new IllegalArgumentException("category unknown!"); - } - if (reg.contains(firstProvider) && - reg.contains(secondProvider)) { - return reg.setOrdering(firstProvider, secondProvider); - } - return false; - } - - /** - * Sets a pairwise ordering between two service provider objects - * within a given category. If one or both objects are not - * currently registered within the given category, or if no - * ordering is currently set between them, nothing happens - * and false is returned. - * - *

The ordering will be used by the - * getServiceProviders methods when their - * useOrdering argument is true. - * - * @param category a Class object indicating the - * category under which the preference is to be disestablished. - * @param firstProvider the formerly preferred provider. - * @param secondProvider the provider to which - * firstProvider was formerly preferred. - * - * @return true if a previously set ordering was - * disestablished. - * - * @exception IllegalArgumentException if either provider is - * null or they are the same object. - * @exception IllegalArgumentException if there is no category - * corresponding to category. - */ - public boolean unsetOrdering(Class category, - T firstProvider, - T secondProvider) { - if (firstProvider == null || secondProvider == null) { - throw new IllegalArgumentException("provider is null!"); - } - if (firstProvider == secondProvider) { - throw new IllegalArgumentException("providers are the same!"); - } - SubRegistry reg = (SubRegistry)categoryMap.get(category); - if (reg == null) { - throw new IllegalArgumentException("category unknown!"); - } - if (reg.contains(firstProvider) && - reg.contains(secondProvider)) { - return reg.unsetOrdering(firstProvider, secondProvider); - } - return false; - } - - /** - * Deregisters all service provider object currently registered - * under the given category. - * - * @param category the category to be emptied. - * - * @exception IllegalArgumentException if there is no category - * corresponding to category. - */ - public void deregisterAll(Class category) { - SubRegistry reg = (SubRegistry)categoryMap.get(category); - if (reg == null) { - throw new IllegalArgumentException("category unknown!"); - } - reg.clear(); - } - - /** - * Deregisters all currently registered service providers from all - * categories. - */ - public void deregisterAll() { - Iterator iter = categoryMap.values().iterator(); - while (iter.hasNext()) { - SubRegistry reg = (SubRegistry)iter.next(); - reg.clear(); - } - } - - /** - * Finalizes this object prior to garbage collection. The - * deregisterAll method is called to deregister all - * currently registered service providers. This method should not - * be called from application code. - * - * @exception Throwable if an error occurs during superclass - * finalization. - */ - public void finalize() throws Throwable { - deregisterAll(); - super.finalize(); - } -} - - -/** - * A portion of a registry dealing with a single superclass or - * interface. - */ -class SubRegistry { - - ServiceRegistry registry; - - Class category; - - // Provider Objects organized by partial oridering - PartiallyOrderedSet poset = new PartiallyOrderedSet(); - - // Class -> Provider Object of that class - Map,Object> map = new HashMap<>(); - - public SubRegistry(ServiceRegistry registry, Class category) { - this.registry = registry; - this.category = category; - } - - public boolean registerServiceProvider(Object provider) { - Object oprovider = map.get(provider.getClass()); - boolean present = oprovider != null; - - if (present) { - deregisterServiceProvider(oprovider); - } - map.put(provider.getClass(), provider); - poset.add(provider); - if (provider instanceof RegisterableService) { - RegisterableService rs = (RegisterableService)provider; - rs.onRegistration(registry, category); - } - - return !present; - } - - /** - * If the provider was not previously registered, do nothing. - * - * @return true if the provider was previously registered. 
-     */
-    public boolean deregisterServiceProvider(Object provider) {
-        Object oprovider = map.get(provider.getClass());
-
-        if (provider == oprovider) {
-            map.remove(provider.getClass());
-            poset.remove(provider);
-            if (provider instanceof RegisterableService) {
-                RegisterableService rs = (RegisterableService)provider;
-                rs.onDeregistration(registry, category);
-            }
-
-            return true;
-        }
-        return false;
-    }
-
-    public boolean contains(Object provider) {
-        Object oprovider = map.get(provider.getClass());
-        return oprovider == provider;
-    }
-
-    public boolean setOrdering(Object firstProvider,
-                               Object secondProvider) {
-        return poset.setOrdering(firstProvider, secondProvider);
-    }
-
-    public boolean unsetOrdering(Object firstProvider,
-                                 Object secondProvider) {
-        return poset.unsetOrdering(firstProvider, secondProvider);
-    }
-
-    public Iterator getServiceProviders(boolean useOrdering) {
-        if (useOrdering) {
-            return poset.iterator();
-        } else {
-            return map.values().iterator();
-        }
-    }
-
-    public <T> T getServiceProviderByClass(Class<T> providerClass) {
-        return (T)map.get(providerClass);
-    }
-
-    public void clear() {
-        Iterator iter = map.values().iterator();
-        while (iter.hasNext()) {
-            Object provider = iter.next();
-            iter.remove();
-
-            if (provider instanceof RegisterableService) {
-                RegisterableService rs = (RegisterableService)provider;
-                rs.onDeregistration(registry, category);
-            }
-        }
-        poset.clear();
-    }
-
-    public void finalize() {
-        clear();
-    }
-}
-
-
-/**
- * A class for wrapping Iterators with a filter function.
- * This provides an iterator for a subset without duplication.
- */
-class FilterIterator<T> implements Iterator<T> {
-
-    private Iterator<T> iter;
-    private ServiceRegistry.Filter filter;
-
-    private T next = null;
-
-    public FilterIterator(Iterator<T> iter,
-                          ServiceRegistry.Filter filter) {
-        this.iter = iter;
-        this.filter = filter;
-        advance();
-    }
-
-    private void advance() {
-        while (iter.hasNext()) {
-            T elt = iter.next();
-            if (filter.filter(elt)) {
-                next = elt;
-                return;
-            }
-        }
-
-        next = null;
-    }
-
-    public boolean hasNext() {
-        return next != null;
-    }
-
-    public T next() {
-        if (next == null) {
-            throw new NoSuchElementException();
-        }
-        T o = next;
-        advance();
-        return o;
-    }
-
-    public void remove() {
-        throw new UnsupportedOperationException();
-    }
-}

From 6cfe538cac9ed16873e277d117eacfd1e4fd7fd9 Mon Sep 17 00:00:00 2001
From: bencomp
Date: Tue, 18 Oct 2022 00:03:48 +0200
Subject: [PATCH 0006/1525] Update HTTP URL to HTTPS URL

---
 .../source/_static/navbarscroll.js                 |  2 +-
 .../source/_templates/navbar.html                  | 32 +++++++++----------
 doc/sphinx-guides/source/admin/monitoring.rst      |  2 +-
 .../source/api/client-libraries.rst                |  6 ++--
 .../source/api/external-tools.rst                  |  4 +--
 .../source/api/getting-started.rst                 |  2 +-
 doc/sphinx-guides/source/api/intro.rst             |  2 +-
 doc/sphinx-guides/source/api/native-api.rst        | 12 +++----
 doc/sphinx-guides/source/api/sword.rst             | 12 +++----
 doc/sphinx-guides/source/conf.py                   |  2 +-
 .../source/developers/dev-environment.rst          |  8 ++---
 .../source/developers/documentation.rst            |  6 ++--
 doc/sphinx-guides/source/developers/intro.rst      |  6 ++--
 .../source/developers/testing.rst                  |  8 ++---
 doc/sphinx-guides/source/developers/tools.rst      |  8 ++---
 .../source/developers/unf/index.rst                |  2 +-
 .../source/developers/unf/unf-v3.rst               |  6 ++--
 .../source/developers/unf/unf-v6.rst               |  2 +-
 .../source/developers/version-control.rst          |  2 +-
 doc/sphinx-guides/source/index.rst                 |  4 +--
 .../source/installation/config.rst                 | 26 +++++++--------
 .../source/installation/installation-main.rst      |  6 ++--
 .../source/installation/intro.rst                  |  4 +--
 .../source/installation/oauth2.rst                 |  2 +-
 .../source/installation/oidc.rst                   |  2 +-
 .../source/installation/prerequisites.rst          |  4 +--
 .../source/installation/shibboleth.rst             |  6 ++--
 .../source/style/foundations.rst                   | 16 +++++-----
 doc/sphinx-guides/source/style/patterns.rst        | 28 ++++++++--------
 doc/sphinx-guides/source/user/account.rst          |  2 +-
 doc/sphinx-guides/source/user/appendix.rst         | 10 +++---
 .../source/user/dataset-management.rst             |  6 ++--
 .../user/tabulardataingest/ingestprocess.rst       |  4 +--
 33 files changed, 122 insertions(+), 122 deletions(-)

diff --git a/doc/sphinx-guides/source/_static/navbarscroll.js b/doc/sphinx-guides/source/_static/navbarscroll.js
index 66c9d4d7995..735f80870cd 100644
--- a/doc/sphinx-guides/source/_static/navbarscroll.js
+++ b/doc/sphinx-guides/source/_static/navbarscroll.js
@@ -1,6 +1,6 @@
 /*
     Use to fix hidden section headers behind the navbar when using links with targets
-    See: http://stackoverflow.com/questions/10732690/offsetting-an-html-anchor-to-adjust-for-fixed-header
+    See: https://stackoverflow.com/questions/10732690/offsetting-an-html-anchor-to-adjust-for-fixed-header
 */
 $jqTheme(document).ready(function() {
     $jqTheme('a[href*="#"]:not([href="#"])').on('click', function() {
diff --git a/doc/sphinx-guides/source/_templates/navbar.html b/doc/sphinx-guides/source/_templates/navbar.html
index 538cccf74d7..c7b81dcb937 100644
--- a/doc/sphinx-guides/source/_templates/navbar.html
+++ b/doc/sphinx-guides/source/_templates/navbar.html
[The navbar.html hunks (@@ -15,7 +15,7 @@, @@ -24,15 +24,15 @@ and @@ -49,18 +49,18 @@) did not survive extraction: the anchor markup is gone and only the link labels "Dataverse Project", "Community" and "Contact" remain. Per the commit subject, each hunk rewrites the corresponding href from http:// to https://.]
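For context on the ServiceRegistry code removed earlier: it mirrors the JDK's javax.imageio.spi.ServiceRegistry, so the pairwise-ordering API those deleted methods implement can be exercised against the stock IIORegistry subclass. A minimal sketch, assuming at least two ImageReaderSpi providers are registered (the JDK ships several); variable names are illustrative:

    import javax.imageio.spi.IIORegistry;
    import javax.imageio.spi.ImageReaderSpi;
    import java.util.Iterator;

    public class ProviderOrderingSketch {
        public static void main(String[] args) {
            // The default registry comes pre-populated with the JDK's image reader SPIs.
            IIORegistry registry = IIORegistry.getDefaultInstance();

            // useOrdering=true makes iteration respect any pairwise preferences.
            Iterator<ImageReaderSpi> it =
                    registry.getServiceProviders(ImageReaderSpi.class, true);
            ImageReaderSpi first = it.next();   // assumes >= 2 registered providers
            ImageReaderSpi second = it.next();

            // Prefer 'first' over 'second'; true means this ordering was not set before.
            boolean established = registry.setOrdering(ImageReaderSpi.class, first, second);
            System.out.println("new ordering established: " + established);

            // Remove the preference again.
            registry.unsetOrdering(ImageReaderSpi.class, first, second);
        }
    }

After setOrdering(), subsequent getServiceProviders(category, true) calls yield providers in an order consistent with every preference set this way.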

diff --git a/doc/sphinx-guides/source/admin/monitoring.rst b/doc/sphinx-guides/source/admin/monitoring.rst
index a4affda1302..e902d5fdcc9 100644
--- a/doc/sphinx-guides/source/admin/monitoring.rst
+++ b/doc/sphinx-guides/source/admin/monitoring.rst
@@ -14,7 +14,7 @@ In production you'll want to monitor the usual suspects such as CPU, memory, fre
 Munin
 +++++
 
-http://munin-monitoring.org says, "A default installation provides a lot of graphs with almost no work." From RHEL or CentOS 7, you can try the following steps.
+https://munin-monitoring.org says, "A default installation provides a lot of graphs with almost no work." From RHEL or CentOS 7, you can try the following steps.
 
 Enable the EPEL yum repo (if you haven't already):
diff --git a/doc/sphinx-guides/source/api/client-libraries.rst b/doc/sphinx-guides/source/api/client-libraries.rst
index 634f03a8125..388a9d641ed 100755
--- a/doc/sphinx-guides/source/api/client-libraries.rst
+++ b/doc/sphinx-guides/source/api/client-libraries.rst
@@ -13,7 +13,7 @@ Python
 
 There are two Python modules for interacting with Dataverse Software APIs.
 
-`pyDataverse `_ primarily allows developers to manage Dataverse collections, datasets and datafiles. Its intention is to help with data migrations and DevOps activities such as testing and configuration management. The module is developed by `Stefan Kasberger `_ from `AUSSDA - The Austrian Social Science Data Archive `_.
+`pyDataverse `_ primarily allows developers to manage Dataverse collections, datasets and datafiles. Its intention is to help with data migrations and DevOps activities such as testing and configuration management. The module is developed by `Stefan Kasberger `_ from `AUSSDA - The Austrian Social Science Data Archive `_.
 
 `dataverse-client-python `_ had its initial release in 2015. `Robert Liebowitz `_ created this library while at the `Center for Open Science (COS) `_ and the COS uses it to integrate the `Open Science Framework (OSF) `_ with a Dataverse installation via an add-on which itself is open source and listed on the :doc:`/api/apps` page.
 
@@ -30,14 +30,14 @@ R
 
 https://github.com/IQSS/dataverse-client-r is the official R package for Dataverse Software APIs. The latest release can be installed from `CRAN `_. The R client can search and download datasets. It is useful when automatically (instead of manually) downloading data files as part of a script. For bulk edit and upload operations, we currently recommend pyDataverse.
 
-The package is currently maintained by `Shiro Kuriwaki `_. It was originally created by `Thomas Leeper `_ and then formerly maintained by `Will Beasley `_.
+The package is currently maintained by `Shiro Kuriwaki `_. It was originally created by `Thomas Leeper `_ and then formerly maintained by `Will Beasley `_.
 
 Java
 ----
 
 https://github.com/IQSS/dataverse-client-java is the official Java library for Dataverse Software APIs.
 
-`Richard Adams `_ from `ResearchSpace `_ created and maintains this library.
+`Richard Adams `_ from `ResearchSpace `_ created and maintains this library.
 
 Ruby
 ----
diff --git a/doc/sphinx-guides/source/api/external-tools.rst b/doc/sphinx-guides/source/api/external-tools.rst
index d72a6f62004..8c6c9fa8d46 100644
--- a/doc/sphinx-guides/source/api/external-tools.rst
+++ b/doc/sphinx-guides/source/api/external-tools.rst
@@ -11,7 +11,7 @@ Introduction
 
 External tools are additional applications the user can access or open from your Dataverse installation to preview, explore, and manipulate data files and datasets.
The term "external" is used to indicate that the tool is not part of the main Dataverse Software. -Once you have created the external tool itself (which is most of the work!), you need to teach a Dataverse installation how to construct URLs that your tool needs to operate. For example, if you've deployed your tool to fabulousfiletool.com your tool might want the ID of a file and the siteUrl of the Dataverse installation like this: https://fabulousfiletool.com?fileId=42&siteUrl=http://demo.dataverse.org +Once you have created the external tool itself (which is most of the work!), you need to teach a Dataverse installation how to construct URLs that your tool needs to operate. For example, if you've deployed your tool to fabulousfiletool.com your tool might want the ID of a file and the siteUrl of the Dataverse installation like this: https://fabulousfiletool.com?fileId=42&siteUrl=https://demo.dataverse.org In short, you will be creating a manifest in JSON format that describes not only how to construct URLs for your tool, but also what types of files your tool operates on, where it should appear in the Dataverse installation web interfaces, etc. @@ -94,7 +94,7 @@ Terminology toolParameters **Query parameters** are supported and described below. - queryParameters **Key/value combinations** that can be appended to the toolUrl. For example, once substitution takes place (described below) the user may be redirected to ``https://fabulousfiletool.com?fileId=42&siteUrl=http://demo.dataverse.org``. + queryParameters **Key/value combinations** that can be appended to the toolUrl. For example, once substitution takes place (described below) the user may be redirected to ``https://fabulousfiletool.com?fileId=42&siteUrl=https://demo.dataverse.org``. query parameter keys An **arbitrary string** to associate with a value that is populated with a reserved word (described below). As the author of the tool, you have control over what "key" you would like to be passed to your tool. For example, if you want to have your tool receive and operate on the query parameter "dataverseFileId=42" instead of just "fileId=42", that's fine. diff --git a/doc/sphinx-guides/source/api/getting-started.rst b/doc/sphinx-guides/source/api/getting-started.rst index c465b726421..fd7c561cdf0 100644 --- a/doc/sphinx-guides/source/api/getting-started.rst +++ b/doc/sphinx-guides/source/api/getting-started.rst @@ -9,7 +9,7 @@ If you are a researcher or curator who wants to automate parts of your workflow, Servers You Can Test With ------------------------- -Rather than using a production Dataverse installation, API users are welcome to use http://demo.dataverse.org for testing. You can email support@dataverse.org if you have any trouble with this server. +Rather than using a production Dataverse installation, API users are welcome to use https://demo.dataverse.org for testing. You can email support@dataverse.org if you have any trouble with this server. If you would rather have full control over your own test server, deployments to AWS, Docker, Vagrant, and more are covered in the :doc:`/developers/index` and the :doc:`/installation/index`. 
diff --git a/doc/sphinx-guides/source/api/intro.rst b/doc/sphinx-guides/source/api/intro.rst index 933932cd7b9..6c61bb8c20d 100755 --- a/doc/sphinx-guides/source/api/intro.rst +++ b/doc/sphinx-guides/source/api/intro.rst @@ -237,7 +237,7 @@ Dataverse Software API questions are on topic in all the usual places: - The dataverse-community Google Group: https://groups.google.com/forum/#!forum/dataverse-community - The Dataverse Project community calls: https://dataverse.org/community-calls -- The Dataverse Project chat room: http://chat.dataverse.org +- The Dataverse Project chat room: https://chat.dataverse.org - The Dataverse Project ticketing system: support@dataverse.org After your question has been answered, you are welcome to help improve the :doc:`faq` section of this guide. diff --git a/doc/sphinx-guides/source/api/native-api.rst b/doc/sphinx-guides/source/api/native-api.rst index 93e1c36f179..578b35011ff 100644 --- a/doc/sphinx-guides/source/api/native-api.rst +++ b/doc/sphinx-guides/source/api/native-api.rst @@ -9,7 +9,7 @@ The Dataverse Software exposes most of its GUI functionality via a REST-based AP .. _CORS: https://www.w3.org/TR/cors/ -.. warning:: The Dataverse Software's API is versioned at the URI - all API calls may include the version number like so: ``http://server-address/api/v1/...``. Omitting the ``v1`` part would default to the latest API version (currently 1). When writing scripts/applications that will be used for a long time, make sure to specify the API version, so they don't break when the API is upgraded. +.. warning:: The Dataverse Software's API is versioned at the URI - all API calls may include the version number like so: ``https://server-address/api/v1/...``. Omitting the ``v1`` part would default to the latest API version (currently 1). When writing scripts/applications that will be used for a long time, make sure to specify the API version, so they don't break when the API is upgraded. .. contents:: |toctitle| :local: @@ -508,7 +508,7 @@ The fully expanded example above (without environment variables) looks like this curl -H X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx -X PUT https://demo.dataverse.org/api/dataverses/root/metadatablocks/isRoot -.. note:: Previous endpoints ``$SERVER/api/dataverses/$id/metadatablocks/:isRoot`` and ``POST http://$SERVER/api/dataverses/$id/metadatablocks/:isRoot?key=$apiKey`` are deprecated, but supported. +.. note:: Previous endpoints ``$SERVER/api/dataverses/$id/metadatablocks/:isRoot`` and ``POST https://$SERVER/api/dataverses/$id/metadatablocks/:isRoot?key=$apiKey`` are deprecated, but supported. .. _create-dataset-command: @@ -720,7 +720,7 @@ Getting its draft version: export SERVER_URL=https://demo.dataverse.org export PERSISTENT_IDENTIFIER=doi:10.5072/FK2/J8SJZB - curl -H "X-Dataverse-key:$API_TOKEN" http://$SERVER/api/datasets/:persistentId/versions/:draft?persistentId=$PERSISTENT_IDENTIFIER + curl -H "X-Dataverse-key:$API_TOKEN" https://$SERVER/api/datasets/:persistentId/versions/:draft?persistentId=$PERSISTENT_IDENTIFIER The fully expanded example above (without environment variables) looks like this: @@ -2226,7 +2226,7 @@ The fully expanded example above (without environment variables) looks like this Currently the following methods are used to detect file types: - The file type detected by the browser (or sent via API). -- JHOVE: http://jhove.openpreservation.org +- JHOVE: https://jhove.openpreservation.org - The file extension (e.g. 
".ipybn") is used, defined in a file called ``MimeTypeDetectionByFileExtension.properties``. - The file name (e.g. "Dockerfile") is used, defined in a file called ``MimeTypeDetectionByFileName.properties``. @@ -2413,7 +2413,7 @@ The fully expanded example above (without environment variables) looks like this curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X POST \ -F 'jsonData={"description":"My description bbb.","provFreeform":"Test prov freeform","categories":["Data"],"restrict":false}' \ - http://demo.dataverse.org/api/files/24/metadata + https://demo.dataverse.org/api/files/24/metadata A curl example using a ``PERSISTENT_ID`` @@ -2614,7 +2614,7 @@ The fully expanded example above (without environment variables) looks like this curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X POST "https://demo.dataverse.org/api/files/:persistentId/prov-freeform?persistentId=doi:10.5072/FK2/AAA000" -H "Content-type:application/json" --upload-file provenance.json -See a sample JSON file :download:`file-provenance.json <../_static/api/file-provenance.json>` from http://openprovenance.org (c.f. Huynh, Trung Dong and Moreau, Luc (2014) ProvStore: a public provenance repository. At 5th International Provenance and Annotation Workshop (IPAW'14), Cologne, Germany, 09-13 Jun 2014. pp. 275-277). +See a sample JSON file :download:`file-provenance.json <../_static/api/file-provenance.json>` from https://openprovenance.org (c.f. Huynh, Trung Dong and Moreau, Luc (2014) ProvStore: a public provenance repository. At 5th International Provenance and Annotation Workshop (IPAW'14), Cologne, Germany, 09-13 Jun 2014. pp. 275-277). Delete Provenance JSON for an uploaded file ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ diff --git a/doc/sphinx-guides/source/api/sword.rst b/doc/sphinx-guides/source/api/sword.rst index 11b43e98774..c9ac83bc204 100755 --- a/doc/sphinx-guides/source/api/sword.rst +++ b/doc/sphinx-guides/source/api/sword.rst @@ -9,19 +9,19 @@ SWORD_ stands for "Simple Web-service Offering Repository Deposit" and is a "pro About ----- -Introduced in Dataverse Network (DVN) `3.6 `_, the SWORD API was formerly known as the "Data Deposit API" and ``data-deposit/v1`` appeared in the URLs. For backwards compatibility these URLs continue to work (with deprecation warnings). Due to architectural changes and security improvements (especially the introduction of API tokens) in Dataverse Software 4.0, a few backward incompatible changes were necessarily introduced and for this reason the version has been increased to ``v1.1``. For details, see :ref:`incompatible`. +Introduced in Dataverse Network (DVN) `3.6 `_, the SWORD API was formerly known as the "Data Deposit API" and ``data-deposit/v1`` appeared in the URLs. For backwards compatibility these URLs continue to work (with deprecation warnings). Due to architectural changes and security improvements (especially the introduction of API tokens) in Dataverse Software 4.0, a few backward incompatible changes were necessarily introduced and for this reason the version has been increased to ``v1.1``. For details, see :ref:`incompatible`. -The Dataverse Software implements most of SWORDv2_, which is specified at http://swordapp.github.io/SWORDv2-Profile/SWORDProfile.html . Please reference the `SWORDv2 specification`_ for expected HTTP status codes (i.e. 201, 204, 404, etc.), headers (i.e. "Location"), etc. +The Dataverse Software implements most of SWORDv2_, which is specified at https://swordapp.github.io/SWORDv2-Profile/SWORDProfile.html . 
Please reference the `SWORDv2 specification`_ for expected HTTP status codes (i.e. 201, 204, 404, etc.), headers (i.e. "Location"), etc. As a profile of AtomPub, XML is used throughout SWORD. As of Dataverse Software 4.0 datasets can also be created via JSON using the "native" API. SWORD is limited to the dozen or so fields listed below in the crosswalk, but the native API allows you to populate all metadata fields available in a Dataverse installation. -.. _SWORD: http://en.wikipedia.org/wiki/SWORD_%28protocol%29 +.. _SWORD: https://en.wikipedia.org/wiki/SWORD_%28protocol%29 .. _SWORDv2: http://swordapp.org/sword-v2/sword-v2-specifications/ .. _RFC 5023: https://tools.ietf.org/html/rfc5023 -.. _SWORDv2 specification: http://swordapp.github.io/SWORDv2-Profile/SWORDProfile.html +.. _SWORDv2 specification: https://swordapp.github.io/SWORDv2-Profile/SWORDProfile.html .. _sword-auth: @@ -86,7 +86,7 @@ New features as of v1.1 - "Contact E-mail" is automatically populated from dataset owner's email. -- "Subject" uses our controlled vocabulary list of subjects. This list is in the Citation Metadata of our User Guide > `Metadata References `_. Otherwise, if a term does not match our controlled vocabulary list, it will put any subject terms in "Keyword". If Subject is empty it is automatically populated with "N/A". +- "Subject" uses our controlled vocabulary list of subjects. This list is in the Citation Metadata of our User Guide > `Metadata References `_. Otherwise, if a term does not match our controlled vocabulary list, it will put any subject terms in "Keyword". If Subject is empty it is automatically populated with "N/A". - Zero-length files are now allowed (but not necessarily encouraged). @@ -127,7 +127,7 @@ Dublin Core Terms (DC Terms) Qualified Mapping - Dataverse Project DB Element Cr +-----------------------------+----------------------------------------------+--------------+-------------------------------------------------------------------------------------------------------------------------------------------------------------+ |dcterms:creator | authorName (LastName, FirstName) | Y | Author(s) for the Dataset. | +-----------------------------+----------------------------------------------+--------------+-------------------------------------------------------------------------------------------------------------------------------------------------------------+ -|dcterms:subject | subject (Controlled Vocabulary) OR keyword | Y | Controlled Vocabulary list is in our User Guide > `Metadata References `_. | +|dcterms:subject | subject (Controlled Vocabulary) OR keyword | Y | Controlled Vocabulary list is in our User Guide > `Metadata References `_. | +-----------------------------+----------------------------------------------+--------------+-------------------------------------------------------------------------------------------------------------------------------------------------------------+ |dcterms:description | dsDescriptionValue | Y | Describing the purpose, scope or nature of the Dataset. Can also use dcterms:abstract. 
| +-----------------------------+----------------------------------------------+--------------+-------------------------------------------------------------------------------------------------------------------------------------------------------------+ diff --git a/doc/sphinx-guides/source/conf.py b/doc/sphinx-guides/source/conf.py index 880ed561720..5ff538c3c46 100755 --- a/doc/sphinx-guides/source/conf.py +++ b/doc/sphinx-guides/source/conf.py @@ -432,7 +432,7 @@ # Example configuration for intersphinx: refer to the Python standard library. -intersphinx_mapping = {'http://docs.python.org/': None} +intersphinx_mapping = {'https://docs.python.org/': None} # Suppress "WARNING: unknown mimetype for ..." https://github.com/IQSS/dataverse/issues/3391 suppress_warnings = ['epub.unknown_project_files'] rst_prolog = """ diff --git a/doc/sphinx-guides/source/developers/dev-environment.rst b/doc/sphinx-guides/source/developers/dev-environment.rst index e44a70a405f..2139b85c64a 100755 --- a/doc/sphinx-guides/source/developers/dev-environment.rst +++ b/doc/sphinx-guides/source/developers/dev-environment.rst @@ -34,7 +34,7 @@ On Linux, you are welcome to use the OpenJDK available from package managers. Install Netbeans or Maven ~~~~~~~~~~~~~~~~~~~~~~~~~ -NetBeans IDE is recommended, and can be downloaded from http://netbeans.org . Developers may use any editor or IDE. We recommend NetBeans because it is free, works cross platform, has good support for Jakarta EE projects, and includes a required build tool, Maven. +NetBeans IDE is recommended, and can be downloaded from https://netbeans.org . Developers may use any editor or IDE. We recommend NetBeans because it is free, works cross platform, has good support for Jakarta EE projects, and includes a required build tool, Maven. Below we describe how to build the Dataverse Software war file with Netbeans but if you prefer to use only Maven, you can find installation instructions in the :doc:`tools` section. @@ -74,7 +74,7 @@ On Mac, run this command: ``brew install jq`` -On Linux, install ``jq`` from your package manager or download a binary from http://stedolan.github.io/jq/ +On Linux, install ``jq`` from your package manager or download a binary from https://stedolan.github.io/jq/ Install Payara ~~~~~~~~~~~~~~ @@ -117,7 +117,7 @@ On Linux, you should just install PostgreSQL using your favorite package manager Install Solr ~~~~~~~~~~~~ -`Solr `_ 8.11.1 is required. +`Solr `_ 8.11.1 is required. To install Solr, execute the following commands: @@ -127,7 +127,7 @@ To install Solr, execute the following commands: ``cd /usr/local/solr`` -``curl -O http://archive.apache.org/dist/lucene/solr/8.11.1/solr-8.11.1.tgz`` +``curl -O https://archive.apache.org/dist/lucene/solr/8.11.1/solr-8.11.1.tgz`` ``tar xvfz solr-8.11.1.tgz`` diff --git a/doc/sphinx-guides/source/developers/documentation.rst b/doc/sphinx-guides/source/developers/documentation.rst index b20fd112533..46fc268461b 100755 --- a/doc/sphinx-guides/source/developers/documentation.rst +++ b/doc/sphinx-guides/source/developers/documentation.rst @@ -34,7 +34,7 @@ If you would like to read more about the Dataverse Project's use of GitHub, plea Building the Guides with Sphinx ------------------------------- -The Dataverse guides are written using Sphinx (http://sphinx-doc.org). We recommend installing Sphinx and building the guides locally so you can get an accurate preview of your changes. +The Dataverse guides are written using Sphinx (https://sphinx-doc.org). 
We recommend installing Sphinx and building the guides locally so you can get an accurate preview of your changes. Installing Sphinx ~~~~~~~~~~~~~~~~~ @@ -58,7 +58,7 @@ In some parts of the documentation, graphs are rendered as images using the Sphi Building the guides requires the ``dot`` executable from GraphViz. -This requires having `GraphViz `_ installed and either having ``dot`` on the path or +This requires having `GraphViz `_ installed and either having ``dot`` on the path or `adding options to the make call `_. Editing and Building the Guides @@ -67,7 +67,7 @@ Editing and Building the Guides To edit the existing documentation: - Create a branch (see :ref:`how-to-make-a-pull-request`). -- In ``doc/sphinx-guides/source`` you will find the .rst files that correspond to http://guides.dataverse.org. +- In ``doc/sphinx-guides/source`` you will find the .rst files that correspond to https://guides.dataverse.org. - Using your preferred text editor, open and edit the necessary files, or create new ones. Once you are done, open a terminal, change directories to ``doc/sphinx-guides``, activate (or reactivate) your Python virtual environment, and build the guides. diff --git a/doc/sphinx-guides/source/developers/intro.rst b/doc/sphinx-guides/source/developers/intro.rst index 7f4e8c1ba34..6469a43b5ab 100755 --- a/doc/sphinx-guides/source/developers/intro.rst +++ b/doc/sphinx-guides/source/developers/intro.rst @@ -2,7 +2,7 @@ Introduction ============ -Welcome! `The Dataverse Project `_ is an `open source `_ project that loves `contributors `_! +Welcome! `The Dataverse Project `_ is an `open source `_ project that loves `contributors `_! .. contents:: |toctitle| :local: @@ -19,7 +19,7 @@ To get started, you'll want to set up your :doc:`dev-environment` and make sure Getting Help ------------ -If you have any questions at all, please reach out to other developers via the channels listed in https://github.com/IQSS/dataverse/blob/develop/CONTRIBUTING.md such as http://chat.dataverse.org, the `dataverse-dev `_ mailing list, `community calls `_, or support@dataverse.org. +If you have any questions at all, please reach out to other developers via the channels listed in https://github.com/IQSS/dataverse/blob/develop/CONTRIBUTING.md such as https://chat.dataverse.org, the `dataverse-dev `_ mailing list, `community calls `_, or support@dataverse.org. .. _core-technologies: @@ -52,7 +52,7 @@ Related Guides If you are a developer who wants to make use of the Dataverse Software APIs, please see the :doc:`/api/index`. If you have front-end UI questions, please see the :doc:`/style/index`. -If you are a sysadmin who likes to code, you may be interested in hacking on installation scripts mentioned in the :doc:`/installation/index`. We validate the installation scripts with :doc:`/developers/tools` such as `Vagrant `_ and Docker (see the :doc:`containers` section). +If you are a sysadmin who likes to code, you may be interested in hacking on installation scripts mentioned in the :doc:`/installation/index`. We validate the installation scripts with :doc:`/developers/tools` such as `Vagrant `_ and Docker (see the :doc:`containers` section). 
Related Projects ---------------- diff --git a/doc/sphinx-guides/source/developers/testing.rst b/doc/sphinx-guides/source/developers/testing.rst index 4b3d5fd0a55..132120291c2 100755 --- a/doc/sphinx-guides/source/developers/testing.rst +++ b/doc/sphinx-guides/source/developers/testing.rst @@ -46,7 +46,7 @@ The main takeaway should be that we care about unit testing enough to measure th Writing Unit Tests with JUnit ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -We are aware that there are newer testing tools such as TestNG, but we use `JUnit `_ because it's tried and true. +We are aware that there are newer testing tools such as TestNG, but we use `JUnit `_ because it's tried and true. We support both (legacy) JUnit 4.x tests (forming the majority of our tests) and newer JUnit 5 based testing. @@ -238,11 +238,11 @@ Remember, it’s only a test (and it's not graded)! Some guidelines to bear in m - Map out which logical functions you want to test - Understand what’s being tested and ensure it’s repeatable - Assert the conditions of success / return values for each operation - * A useful resource would be `HTTP status codes `_ + * A useful resource would be `HTTP status codes `_ - Let the code do the labor; automate everything that happens when you run your test file. - Just as with any development, if you’re stuck: ask for help! -To execute existing integration tests on your local Dataverse installation, a helpful command line tool to use is `Maven `_. You should have Maven installed as per the `Development Environment `_ guide, but if not it’s easily done via Homebrew: ``brew install maven``. +To execute existing integration tests on your local Dataverse installation, a helpful command line tool to use is `Maven `_. You should have Maven installed as per the `Development Environment `_ guide, but if not it’s easily done via Homebrew: ``brew install maven``. Once installed, you may run commands with ``mvn [options] [] []``. @@ -487,7 +487,7 @@ Future Work on Integration Tests - Automate testing of dataverse-client-python: https://github.com/IQSS/dataverse-client-python/issues/10 - Work with @leeper on testing the R client: https://github.com/IQSS/dataverse-client-r - Review and attempt to implement "API Test Checklist" from @kcondon at https://docs.google.com/document/d/199Oq1YwQ4pYCguaeW48bIN28QAitSk63NbPYxJHCCAE/edit?usp=sharing -- Generate code coverage reports for **integration** tests: https://github.com/pkainulainen/maven-examples/issues/3 and http://www.petrikainulainen.net/programming/maven/creating-code-coverage-reports-for-unit-and-integration-tests-with-the-jacoco-maven-plugin/ +- Generate code coverage reports for **integration** tests: https://github.com/pkainulainen/maven-examples/issues/3 and https://www.petrikainulainen.net/programming/maven/creating-code-coverage-reports-for-unit-and-integration-tests-with-the-jacoco-maven-plugin/ - Consistent logging of API Tests. Show test name at the beginning and end and status codes returned. - expected passing and known/expected failing integration tests: https://github.com/IQSS/dataverse/issues/4438 diff --git a/doc/sphinx-guides/source/developers/tools.rst b/doc/sphinx-guides/source/developers/tools.rst index cbd27d6e8d2..17673ae499e 100755 --- a/doc/sphinx-guides/source/developers/tools.rst +++ b/doc/sphinx-guides/source/developers/tools.rst @@ -43,20 +43,20 @@ On Windows if you see an error like ``/usr/bin/perl^M: bad interpreter`` you mig PlantUML ++++++++ -PlantUML is used to create diagrams in the guides and other places. 
Download it from http://plantuml.com and check out an example script at https://github.com/IQSS/dataverse/blob/v4.6.1/doc/Architecture/components.sh . Note that for this script to work, you'll need the ``dot`` program, which can be installed on Mac with ``brew install graphviz``.
+PlantUML is used to create diagrams in the guides and other places. Download it from https://plantuml.com and check out an example script at https://github.com/IQSS/dataverse/blob/v4.6.1/doc/Architecture/components.sh . Note that for this script to work, you'll need the ``dot`` program, which can be installed on Mac with ``brew install graphviz``.
 
 Eclipse Memory Analyzer Tool (MAT)
 ++++++++++++++++++++++++++++++++++
 
 The Memory Analyzer Tool (MAT) from Eclipse can help you analyze heap dumps, showing you "leak suspects" such as seen at https://github.com/payara/Payara/issues/350#issuecomment-115262625
 
-It can be downloaded from http://www.eclipse.org/mat
+It can be downloaded from https://www.eclipse.org/mat
 
 If the heap dump provided to you was created with ``gcore`` (such as with ``gcore -o /tmp/app.core $app_pid``) rather than ``jmap``, you will need to convert the file before you can open it in MAT. Using ``app.core.13849`` as example of the original 33 GB file, here is how you could convert it into a 26 GB ``app.core.13849.hprof`` file. Please note that this operation took almost 90 minutes:
 
 ``/usr/java7/bin/jmap -dump:format=b,file=app.core.13849.hprof /usr/java7/bin/java app.core.13849``
 
-A file of this size may not "just work" in MAT. When you attempt to open it you may see something like "An internal error occurred during: "Parsing heap dump from '/tmp/heapdumps/app.core.13849.hprof'". Java heap space". If so, you will need to increase the memory allocated to MAT. On Mac OS X, this can be done by editing ``MemoryAnalyzer.app/Contents/MacOS/MemoryAnalyzer.ini`` and increasing the value "-Xmx1024m" until it's high enough to open the file. See also http://wiki.eclipse.org/index.php/MemoryAnalyzer/FAQ#Out_of_Memory_Error_while_Running_the_Memory_Analyzer
+A file of this size may not "just work" in MAT. When you attempt to open it you may see something like "An internal error occurred during: "Parsing heap dump from '/tmp/heapdumps/app.core.13849.hprof'". Java heap space". If so, you will need to increase the memory allocated to MAT. On Mac OS X, this can be done by editing ``MemoryAnalyzer.app/Contents/MacOS/MemoryAnalyzer.ini`` and increasing the value "-Xmx1024m" until it's high enough to open the file. See also https://wiki.eclipse.org/index.php/MemoryAnalyzer/FAQ#Out_of_Memory_Error_while_Running_the_Memory_Analyzer
 
 PageKite
 ++++++++
 
@@ -73,7 +73,7 @@ The first time you run ``./pagekite.py`` a file at ``~/.pagekite.rc`` will be
 created. You can edit this file to configure PageKite to serve up port 8080 (the default app server HTTP port) or the port of your choosing.
 
-According to https://pagekite.net/support/free-for-foss/ PageKite (very generously!) offers free accounts to developers writing software the meets http://opensource.org/docs/definition.php such as the Dataverse Project.
+According to https://pagekite.net/support/free-for-foss/ PageKite (very generously!) offers free accounts to developers writing software that meets https://opensource.org/docs/definition.php such as the Dataverse Project.
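Related to the MAT notes above: if you can run code inside the JVM you want to inspect, it can also write its own MAT-readable dump instead of relying on ``gcore``/``jmap``. A minimal sketch using the HotSpot diagnostic MXBean (the output path is illustrative and its directory must exist):

    import com.sun.management.HotSpotDiagnosticMXBean;
    import java.lang.management.ManagementFactory;

    public class HeapDumpSketch {
        public static void main(String[] args) throws Exception {
            HotSpotDiagnosticMXBean diagnostics = ManagementFactory.newPlatformMXBeanProxy(
                    ManagementFactory.getPlatformMBeanServer(),
                    "com.sun.management:type=HotSpotDiagnostic",
                    HotSpotDiagnosticMXBean.class);
            // Writes an .hprof file that MAT can open directly; 'true' = live objects only.
            diagnostics.dumpHeap("/tmp/heapdumps/app.hprof", true);
        }
    }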
MSV
+++
diff --git a/doc/sphinx-guides/source/developers/unf/index.rst b/doc/sphinx-guides/source/developers/unf/index.rst
index 2423877348f..856de209e82 100644
--- a/doc/sphinx-guides/source/developers/unf/index.rst
+++ b/doc/sphinx-guides/source/developers/unf/index.rst
@@ -27,7 +27,7 @@ with Dataverse Software 2.0 and throughout the 3.* lifecycle, UNF v.5
 UNF v.6. Two parallel implementations, in R and Java, will be available for cross-validation.
 
-Learn more: Micah Altman and Gary King. 2007. “A Proposed Standard for the Scholarly Citation of Quantitative Data.” D-Lib Magazine, 13. Publisher’s Version Copy at http://j.mp/2ovSzoT
+Learn more: Micah Altman and Gary King. 2007. “A Proposed Standard for the Scholarly Citation of Quantitative Data.” D-Lib Magazine, 13. Publisher’s Version Copy at https://j.mp/2ovSzoT
 
 **Contents:**
diff --git a/doc/sphinx-guides/source/developers/unf/unf-v3.rst b/doc/sphinx-guides/source/developers/unf/unf-v3.rst
index 3f0018d7fa5..98c07b398e0 100644
--- a/doc/sphinx-guides/source/developers/unf/unf-v3.rst
+++ b/doc/sphinx-guides/source/developers/unf/unf-v3.rst
@@ -34,11 +34,11 @@ For example, the number pi at five digits is represented as -3.1415e+, and the n
 
 1. Terminate character strings representing nonmissing values with a POSIX end-of-line character.
 
-2. Encode each character string with `Unicode bit encoding `_. Versions 3 through 4 use UTF-32BE; Version 4.1 uses UTF-8.
+2. Encode each character string with `Unicode bit encoding `_. Versions 3 through 4 use UTF-32BE; Version 4.1 uses UTF-8.
 
 3. Combine the vector of character strings into a single sequence, with each character string separated by a POSIX end-of-line character and a null byte.
 
-4. Compute a hash on the resulting sequence using the standard MD5 hashing algorithm for Version 3 and using `SHA256 `_ for Version 4. The resulting hash is `base64 `_ encoded to support readability.
+4. Compute a hash on the resulting sequence using the standard MD5 hashing algorithm for Version 3 and using `SHA256 `_ for Version 4. The resulting hash is `base64 `_ encoded to support readability.
 
 5. Calculate the UNF for each lower-level data object, using a consistent UNF version and level of precision across the individual UNFs being combined.
 
@@ -49,4 +49,4 @@ For example, the number pi at five digits is represented as -3.1415e+, and the n
 
 8. Combine UNFs from multiple variables to form a single UNF for an entire data frame, and then combine UNFs for a set of data frames to form a single UNF that represents an entire research study.
 
 Learn more:
-Software for computing UNFs is available in an R Module, which includes a Windows standalone tool and code for Stata and SAS languages. Also see the following for more details: Micah Altman and Gary King. 2007. "A Proposed Standard for the Scholarly Citation of Quantitative Data," D-Lib Magazine, Vol. 13, No. 3/4 (March).
(Abstract: `HTML `_ | Article: `PDF `_) diff --git a/doc/sphinx-guides/source/developers/unf/unf-v6.rst b/doc/sphinx-guides/source/developers/unf/unf-v6.rst index 9648bae47c8..b2495ff3dd9 100644 --- a/doc/sphinx-guides/source/developers/unf/unf-v6.rst +++ b/doc/sphinx-guides/source/developers/unf/unf-v6.rst @@ -156,7 +156,7 @@ For example, to specify a non-default precision the parameter it is specified us | Allowed values are {``128`` , ``192`` , ``196`` , ``256``} with ``128`` being the default. | ``R1`` - **truncate** numeric values to ``N`` digits, **instead of rounding**, as previously described. -`Dr. Micah Altman's classic UNF v5 paper `_ mentions another optional parameter ``T###``, for specifying rounding of date and time values (implemented as stripping the values of entire components - fractional seconds, seconds, minutes, hours... etc., progressively) - but it doesn't specify its syntax. It is left as an exercise for a curious reader to contact the author and work out the details, if so desired. (Not implemented in UNF Version 6 by the Dataverse Project). +`Dr. Micah Altman's classic UNF v5 paper `_ mentions another optional parameter ``T###``, for specifying rounding of date and time values (implemented as stripping the values of entire components - fractional seconds, seconds, minutes, hours... etc., progressively) - but it doesn't specify its syntax. It is left as an exercise for a curious reader to contact the author and work out the details, if so desired. (Not implemented in UNF Version 6 by the Dataverse Project). Note: we do not recommend truncating character strings at fewer bytes than the default ``128`` (the ``X`` parameter). At the very least this number **must** be high enough so that the printable UNFs of individual variables or files are not truncated, when calculating combined UNFs of files or datasets, respectively. diff --git a/doc/sphinx-guides/source/developers/version-control.rst b/doc/sphinx-guides/source/developers/version-control.rst index aacc245af5a..31fc0a4e602 100644 --- a/doc/sphinx-guides/source/developers/version-control.rst +++ b/doc/sphinx-guides/source/developers/version-control.rst @@ -24,7 +24,7 @@ The goals of the Dataverse Software branching strategy are: - allow for concurrent development - only ship stable code -We follow a simplified "git flow" model described at http://nvie.com/posts/a-successful-git-branching-model/ involving a "master" branch, a "develop" branch, and feature branches such as "1234-bug-fix". +We follow a simplified "git flow" model described at https://nvie.com/posts/a-successful-git-branching-model/ involving a "master" branch, a "develop" branch, and feature branches such as "1234-bug-fix". Branches ~~~~~~~~ diff --git a/doc/sphinx-guides/source/index.rst b/doc/sphinx-guides/source/index.rst index f7e81756e5b..37bb2353ff7 100755 --- a/doc/sphinx-guides/source/index.rst +++ b/doc/sphinx-guides/source/index.rst @@ -42,7 +42,7 @@ Other Resources Additional information about the Dataverse Project itself including presentations, information about upcoming releases, data management and citation, and announcements can be found at -`http://dataverse.org/ `__ +`https://dataverse.org/ `__ **User Group** @@ -65,7 +65,7 @@ The support email address is `support@dataverse.org `__ -or use `GitHub pull requests `__, +or use `GitHub pull requests `__, if you have some code, scripts or documentation that you'd like to share. If you have a **security issue** to report, please email `security@dataverse.org `__. 
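The UNF recipe above (terminate values, separate with null bytes, hash, base64-encode, truncate) can be sketched with standard Java APIs. This illustrates steps 2 through 4 for a single character vector only; it is a rough illustration, not a conformant UNF implementation:

    import java.io.ByteArrayOutputStream;
    import java.nio.charset.StandardCharsets;
    import java.security.MessageDigest;
    import java.util.Arrays;
    import java.util.Base64;

    public class UnfHashSketch {
        public static void main(String[] args) throws Exception {
            String[] values = {"alpha", "beta", "gamma"};

            // Steps 1 and 3: terminate each nonmissing value with an end-of-line
            // character and separate values with a null byte.
            ByteArrayOutputStream sequence = new ByteArrayOutputStream();
            for (String v : values) {
                sequence.write(v.getBytes(StandardCharsets.UTF_8)); // UTF-8 as in v4.1+
                sequence.write('\n');
                sequence.write(0);
            }

            // Step 4: SHA-256 (v4 and later; v3 used MD5), base64 for readability.
            byte[] digest = MessageDigest.getInstance("SHA-256")
                    .digest(sequence.toByteArray());

            // UNF v6 truncates the digest (128 bits by default) before encoding.
            byte[] truncated = Arrays.copyOf(digest, 16);
            System.out.println("UNF-style hash: "
                    + Base64.getEncoder().encodeToString(truncated));
        }
    }

A real implementation would also perform the numeric rounding/normalization described earlier and the recursive combination of per-variable UNFs.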
diff --git a/doc/sphinx-guides/source/installation/config.rst b/doc/sphinx-guides/source/installation/config.rst
index f2de9d5702f..0edb09784e1 100644
--- a/doc/sphinx-guides/source/installation/config.rst
+++ b/doc/sphinx-guides/source/installation/config.rst
@@ -112,7 +112,7 @@ The need to redirect port HTTP (port 80) to HTTPS (port 443) for security has al
 
 Your decision to proxy or not should primarily be driven by which features of the Dataverse Software you'd like to use. If you'd like to use Shibboleth, the decision is easy because proxying or "fronting" Payara with Apache is required. The details are covered in the :doc:`shibboleth` section.
 
-Even if you have no interest in Shibboleth, you may want to front your Dataverse installation with Apache or nginx to simply the process of installing SSL certificates. There are many tutorials on the Internet for adding certs to Apache, including a some `notes used by the Dataverse Project team `_, but the process of adding a certificate to Payara is arduous and not for the faint of heart. The Dataverse Project team cannot provide much help with adding certificates to Payara beyond linking to `tips `_ on the web.
+Even if you have no interest in Shibboleth, you may want to front your Dataverse installation with Apache or nginx to simplify the process of installing SSL certificates. There are many tutorials on the Internet for adding certs to Apache, including some `notes used by the Dataverse Project team `_, but the process of adding a certificate to Payara is arduous and not for the faint of heart. The Dataverse Project team cannot provide much help with adding certificates to Payara beyond linking to `tips `_ on the web.
 
 Still not convinced you should put Payara behind another web server? Even if you manage to get your SSL certificate into Payara, how are you going to run Payara on low ports such as 80 and 443? Are you going to run Payara as root? Bad idea. This is a security risk. Under "Additional Recommendations" under "Securing Your Installation" above you are advised to configure Payara to run as a user other than root.
 
@@ -124,7 +124,7 @@ If you really don't want to front Payara with any proxy (not recommended), you c
 
 ``./asadmin set server-config.network-config.network-listeners.network-listener.http-listener-2.port=443``
 
-What about port 80? Even if you don't front your Dataverse installation with Apache, you may want to let Apache run on port 80 just to rewrite HTTP to HTTPS as described above. You can use a similar command as above to change the HTTP port that Payara uses from 8080 to 80 (substitute ``http-listener-1.port=80``). Payara can be used to enforce HTTPS on its own without Apache, but configuring this is an exercise for the reader. Answers here may be helpful: http://stackoverflow.com/questions/25122025/glassfish-v4-java-7-port-unification-error-not-able-to-redirect-http-to
+What about port 80? Even if you don't front your Dataverse installation with Apache, you may want to let Apache run on port 80 just to rewrite HTTP to HTTPS as described above. You can use a similar command as above to change the HTTP port that Payara uses from 8080 to 80 (substitute ``http-listener-1.port=80``). Payara can be used to enforce HTTPS on its own without Apache, but configuring this is an exercise for the reader.
Answers here may be helpful: https://stackoverflow.com/questions/25122025/glassfish-v4-java-7-port-unification-error-not-able-to-redirect-http-to If you are running an installation with Apache and Payara on the same server, and would like to restrict Payara from responding to any requests to port 8080 from external hosts (in other words, not through Apache), you can restrict the AJP listener to localhost only with: @@ -157,7 +157,7 @@ and restart Payara. The prefix can be configured via the API (where it is referr Once this is done, you will be able to publish datasets and files, but the persistent identifiers will not be citable, and they will only resolve from the DataCite test environment (and then only if the Dataverse installation from which you published them is accessible - DOIs minted from your laptop will not resolve). Note that any datasets or files created using the test configuration cannot be directly migrated and would need to be created again once a valid DOI namespace is configured. -To properly configure persistent identifiers for a production installation, an account and associated namespace must be acquired for a fee from a DOI or HDL provider. **DataCite** (https://www.datacite.org) is the recommended DOI provider (see https://dataversecommunity.global for more on joining DataCite) but **EZID** (http://ezid.cdlib.org) is an option for the University of California according to https://www.cdlib.org/cdlinfo/2017/08/04/ezid-doi-service-is-evolving/ . **Handle.Net** (https://www.handle.net) is the HDL provider. +To properly configure persistent identifiers for a production installation, an account and associated namespace must be acquired for a fee from a DOI or HDL provider. **DataCite** (https://www.datacite.org) is the recommended DOI provider (see https://dataversecommunity.global for more on joining DataCite) but **EZID** (https://ezid.cdlib.org) is an option for the University of California according to https://www.cdlib.org/cdlinfo/2017/08/04/ezid-doi-service-is-evolving/ . **Handle.Net** (https://www.handle.net) is the HDL provider. Once you have your DOI or Handle account credentials and a namespace, configure your Dataverse installation to use them using the JVM options and database settings below. @@ -205,7 +205,7 @@ Here are the configuration options for handles: - :ref:`:IndependentHandleService <:IndependentHandleService>` (optional) - :ref:`:HandleAuthHandle <:HandleAuthHandle>` (optional) -Note: If you are **minting your own handles** and plan to set up your own handle service, please refer to `Handle.Net documentation `_. +Note: If you are **minting your own handles** and plan to set up your own handle service, please refer to `Handle.Net documentation `_. .. _auth-modes: @@ -288,7 +288,7 @@ Multiple file stores should specify different directories (which would nominally Swift Storage +++++++++++++ -Rather than storing data files on the filesystem, you can opt for an experimental setup with a `Swift Object Storage `_ backend. Each dataset that users create gets a corresponding "container" on the Swift side, and each data file is saved as a file within that container. +Rather than storing data files on the filesystem, you can opt for an experimental setup with a `Swift Object Storage `_ backend. Each dataset that users create gets a corresponding "container" on the Swift side, and each data file is saved as a file within that container. 
**In order to configure a Swift installation,** you need to complete these steps to properly modify the JVM options:
 
@@ -304,7 +304,7 @@ First, run all the following create commands with your Swift endpoint informatio
 
     ./asadmin $ASADMIN_OPTS create-jvm-options "\-Ddataverse.files..username.endpoint1=your-username"
     ./asadmin $ASADMIN_OPTS create-jvm-options "\-Ddataverse.files..endpoint.endpoint1=your-swift-endpoint"
 
-``auth_type`` can either be ``keystone``, ``keystone_v3``, or it will assumed to be ``basic``. ``auth_url`` should be your keystone authentication URL which includes the tokens (e.g. for keystone, ``https://openstack.example.edu:35357/v2.0/tokens`` and for keystone_v3, ``https://openstack.example.edu:35357/v3/auth/tokens``). ``swift_endpoint`` is a URL that looks something like ``http://rdgw.swift.example.org/swift/v1``.
+``auth_type`` can either be ``keystone``, ``keystone_v3``, or it will be assumed to be ``basic``. ``auth_url`` should be your keystone authentication URL which includes the tokens (e.g. for keystone, ``https://openstack.example.edu:35357/v2.0/tokens`` and for keystone_v3, ``https://openstack.example.edu:35357/v3/auth/tokens``). ``swift_endpoint`` is a URL that looks something like ``https://rdgw.swift.example.org/swift/v1``.
 
 Then create a password alias by running (without changes):
 
@@ -400,7 +400,7 @@ You'll need an AWS account with an associated S3 bucket for your installation to
 
 **Make note** of the **bucket's name** and the **region** its data is hosted in.
 
 To **create a user** with full S3 access and nothing more for security reasons, we recommend using IAM
-(Identity and Access Management). See `IAM User Guide `_
+(Identity and Access Management). See `IAM User Guide `_
 for more info on this process.
 
 **Generate the user keys** needed for a Dataverse installation afterwards by clicking on the created user.
 
@@ -410,7 +410,7 @@ for more info on this process.
 
    If you are hosting your Dataverse installation on an AWS EC2 instance alongside storage in S3, it is possible to use IAM Roles instead of the credentials file (the file at ``~/.aws/credentials`` mentioned below). Please note that you will still need the ``~/.aws/config`` file to specify the region. For more information on this option, see
-   http://docs.aws.amazon.com/AWSEC2/latest/UserGuide/iam-roles-for-amazon-ec2.html
+   https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/iam-roles-for-amazon-ec2.html
 
 Preparation When Using Custom S3-Compatible Service
 ###################################################
 
@@ -471,7 +471,7 @@ Additional profiles can be added to these files by appending the relevant information
 
    aws_access_key_id = 
    aws_secret_access_key = 
 
-Place these two files in a folder named ``.aws`` under the home directory for the user running your Dataverse Installation on Payara. (From the `AWS Command Line Interface Documentation `_:
+Place these two files in a folder named ``.aws`` under the home directory for the user running your Dataverse Installation on Payara. (From the `AWS Command Line Interface Documentation `_:
 "In order to separate credentials from less sensitive options, region and output format are stored in a separate file named config in the same folder")
 
@@ -598,7 +598,7 @@ You may provide the values for these via any of the
 
 Reported Working S3-Compatible Storage
 ######################################
 
-`Minio v2018-09-12 `_
+`Minio v2018-09-12 `_
 
 Set ``dataverse.files..path-style-access=true``, as Minio works path-based. Works pretty smooth, easy to setup.
**Can be used for quick testing, too:** just use the example values above. Uses the public (read: unsecure and possibly slow) https://play.minio.io:9000 service. @@ -2063,7 +2063,7 @@ Note: by default, the URL is composed from the settings ``:GuidesBaseUrl`` and ` :GuidesBaseUrl ++++++++++++++ -Set ``:GuidesBaseUrl`` to override the default value "http://guides.dataverse.org". If you are interested in writing your own version of the guides, you may find the :doc:`/developers/documentation` section of the Developer Guide helpful. +Set ``:GuidesBaseUrl`` to override the default value "https://guides.dataverse.org". If you are interested in writing your own version of the guides, you may find the :doc:`/developers/documentation` section of the Developer Guide helpful. ``curl -X PUT -d http://dataverse.example.edu http://localhost:8080/api/admin/settings/:GuidesBaseUrl`` @@ -2084,14 +2084,14 @@ Set ``:NavbarSupportUrl`` to a fully-qualified URL which will be used for the "S Note that this will override the default behaviour for the "Support" menu option, which is to display the Dataverse collection 'feedback' dialog. -``curl -X PUT -d http://dataverse.example.edu/supportpage.html http://localhost:8080/api/admin/settings/:NavbarSupportUrl`` +``curl -X PUT -d https://dataverse.example.edu/supportpage.html http://localhost:8080/api/admin/settings/:NavbarSupportUrl`` :MetricsUrl +++++++++++ Make the metrics component on the root Dataverse collection a clickable link to a website where you present metrics on your Dataverse installation, perhaps one of the community-supported tools mentioned in the :doc:`/admin/reporting-tools-and-queries` section of the Admin Guide. -``curl -X PUT -d http://metrics.dataverse.example.edu http://localhost:8080/api/admin/settings/:MetricsUrl`` +``curl -X PUT -d https://metrics.dataverse.example.edu http://localhost:8080/api/admin/settings/:MetricsUrl`` .. _:MaxFileUploadSizeInBytes: diff --git a/doc/sphinx-guides/source/installation/installation-main.rst b/doc/sphinx-guides/source/installation/installation-main.rst index 4b000f1ef9e..5cb6e7153d4 100755 --- a/doc/sphinx-guides/source/installation/installation-main.rst +++ b/doc/sphinx-guides/source/installation/installation-main.rst @@ -98,7 +98,7 @@ The supplied site URL will be saved under the JVM option :ref:`dataverse.siteUrl The Dataverse Software uses JHOVE_ to help identify the file format (CSV, PNG, etc.) for files that users have uploaded. The installer places files called ``jhove.conf`` and ``jhoveConfig.xsd`` into the directory ``/usr/local/payara5/glassfish/domains/domain1/config`` by default and makes adjustments to the jhove.conf file based on the directory into which you chose to install Payara. -.. _JHOVE: http://jhove.openpreservation.org +.. _JHOVE: https://jhove.openpreservation.org Logging In ---------- @@ -118,7 +118,7 @@ Use the following credentials to log in: - username: dataverseAdmin - password: admin -Congratulations! You have a working Dataverse installation. Soon you'll be tweeting at `@dataverseorg `_ asking to be added to the map at http://dataverse.org :) +Congratulations! You have a working Dataverse installation. Soon you'll be tweeting at `@dataverseorg `_ asking to be added to the map at https://dataverse.org :) Trouble? See if you find an answer in the troubleshooting section below. 
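The ``curl -X PUT`` settings commands above map directly onto any HTTP client. A minimal Java sketch of the ``:GuidesBaseUrl`` example, assuming a locally running installation (the admin API is typically reachable only on localhost):

    import java.net.URI;
    import java.net.http.HttpClient;
    import java.net.http.HttpRequest;
    import java.net.http.HttpResponse;

    public class SettingsPutSketch {
        public static void main(String[] args) throws Exception {
            HttpClient client = HttpClient.newHttpClient();
            // Equivalent to:
            // curl -X PUT -d https://dataverse.example.edu http://localhost:8080/api/admin/settings/:GuidesBaseUrl
            HttpRequest request = HttpRequest.newBuilder(
                            URI.create("http://localhost:8080/api/admin/settings/:GuidesBaseUrl"))
                    .PUT(HttpRequest.BodyPublishers.ofString("https://dataverse.example.edu"))
                    .build();
            HttpResponse<String> response =
                    client.send(request, HttpResponse.BodyHandlers.ofString());
            System.out.println(response.statusCode() + " " + response.body());
        }
    }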
@@ -197,7 +197,7 @@ Be sure you save the changes made here and then restart your Payara server to te UnknownHostException While Deploying ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ -If you are seeing "Caused by: java.net.UnknownHostException: myhost: Name or service not known" in server.log and your hostname is "myhost" the problem is likely that "myhost" doesn't appear in ``/etc/hosts``. See also http://stackoverflow.com/questions/21817809/glassfish-exception-during-deployment-project-with-stateful-ejb/21850873#21850873 +If you are seeing "Caused by: java.net.UnknownHostException: myhost: Name or service not known" in server.log and your hostname is "myhost" the problem is likely that "myhost" doesn't appear in ``/etc/hosts``. See also https://stackoverflow.com/questions/21817809/glassfish-exception-during-deployment-project-with-stateful-ejb/21850873#21850873 .. _fresh-reinstall: diff --git a/doc/sphinx-guides/source/installation/intro.rst b/doc/sphinx-guides/source/installation/intro.rst index 2251af7b81b..e5b10883d4b 100644 --- a/doc/sphinx-guides/source/installation/intro.rst +++ b/doc/sphinx-guides/source/installation/intro.rst @@ -2,7 +2,7 @@ Introduction ============ -Welcome! Thanks for installing `The Dataverse Project `_! +Welcome! Thanks for installing `The Dataverse Project `_! .. contents:: |toctitle| :local: @@ -36,7 +36,7 @@ Getting Help To get help installing or configuring a Dataverse installation, please try one or more of: - posting to the `dataverse-community `_ Google Group. -- asking at http://chat.dataverse.org +- asking at https://chat.dataverse.org - emailing support@dataverse.org to open a private ticket at https://help.hmdc.harvard.edu Information to Send to Support When Installation Fails diff --git a/doc/sphinx-guides/source/installation/oauth2.rst b/doc/sphinx-guides/source/installation/oauth2.rst index 0dfdb0393e0..cd765c91b7f 100644 --- a/doc/sphinx-guides/source/installation/oauth2.rst +++ b/doc/sphinx-guides/source/installation/oauth2.rst @@ -11,7 +11,7 @@ As explained under "Auth Modes" in the :doc:`config` section, OAuth2 is one of t `OAuth2 `_ is an authentication protocol that allows systems to share user data, while letting the users control what data is being shared. When you see buttons stating "login with Google" or "login through Facebook", OAuth2 is probably involved. For the purposes of this section, we will shorten "OAuth2" to just "OAuth." OAuth can be compared and contrasted with :doc:`shibboleth`. -The Dataverse Software supports four OAuth providers: `ORCID `_, `Microsoft Azure Active Directory (AD) `_, `GitHub `_, and `Google `_. +The Dataverse Software supports four OAuth providers: `ORCID `_, `Microsoft Azure Active Directory (AD) `_, `GitHub `_, and `Google `_. In addition :doc:`oidc` are supported, using a standard based on OAuth2. diff --git a/doc/sphinx-guides/source/installation/oidc.rst b/doc/sphinx-guides/source/installation/oidc.rst index a40ef758dc7..ee154ca9b9c 100644 --- a/doc/sphinx-guides/source/installation/oidc.rst +++ b/doc/sphinx-guides/source/installation/oidc.rst @@ -51,7 +51,7 @@ Just like with :doc:`oauth2` you need to obtain a *Client ID* and a *Client Secr You need to apply for credentials out-of-band. The Dataverse installation will discover all necessary metadata for a given provider on its own (this is `part of the standard -`_). +`_). To enable this, you need to specify an *Issuer URL* when creating the configuration for your provider (see below). 
diff --git a/doc/sphinx-guides/source/installation/prerequisites.rst b/doc/sphinx-guides/source/installation/prerequisites.rst index 3cf876a2251..7d458bbc37b 100644 --- a/doc/sphinx-guides/source/installation/prerequisites.rst +++ b/doc/sphinx-guides/source/installation/prerequisites.rst @@ -26,7 +26,7 @@ Installing Java The Dataverse Software should run fine with only the Java Runtime Environment (JRE) installed, but installing the Java Development Kit (JDK) is recommended so that useful tools for troubleshooting production environments are available. We recommend using Oracle JDK or OpenJDK. -The Oracle JDK can be downloaded from http://www.oracle.com/technetwork/java/javase/downloads/index.html +The Oracle JDK can be downloaded from https://www.oracle.com/technetwork/java/javase/downloads/index.html On a RHEL/derivative, install OpenJDK (devel version) using yum:: @@ -261,7 +261,7 @@ Installing jq or you may install it manually:: # cd /usr/bin - # wget http://stedolan.github.io/jq/download/linux64/jq + # wget https://stedolan.github.io/jq/download/linux64/jq # chmod +x jq # jq --version diff --git a/doc/sphinx-guides/source/installation/shibboleth.rst b/doc/sphinx-guides/source/installation/shibboleth.rst index cd0fbda77a6..3a2e1b99c70 100644 --- a/doc/sphinx-guides/source/installation/shibboleth.rst +++ b/doc/sphinx-guides/source/installation/shibboleth.rst @@ -76,7 +76,7 @@ A ``jk-connector`` network listener should have already been set up when you ran You can verify this with ``./asadmin list-network-listeners``. -This enables the `AJP protocol `_ used in Apache configuration files below. +This enables the `AJP protocol `_ used in Apache configuration files below. SSLEngine Warning Workaround ~~~~~~~~~~~~~~~~~~~~~~~~~~~~ @@ -93,7 +93,7 @@ Configure Apache Enforce HTTPS ~~~~~~~~~~~~~ -To prevent attacks such as `FireSheep `_, HTTPS should be enforced. https://wiki.apache.org/httpd/RewriteHTTPToHTTPS provides a good method. You **could** copy and paste that those "rewrite rule" lines into Apache's main config file at ``/etc/httpd/conf/httpd.conf`` but using Apache's "virtual hosts" feature is recommended so that you can leave the main configuration file alone and drop a host-specific file into place. +To prevent attacks such as `FireSheep `_, HTTPS should be enforced. https://wiki.apache.org/httpd/RewriteHTTPToHTTPS provides a good method. You **could** copy and paste that those "rewrite rule" lines into Apache's main config file at ``/etc/httpd/conf/httpd.conf`` but using Apache's "virtual hosts" feature is recommended so that you can leave the main configuration file alone and drop a host-specific file into place. Below is an example of how "rewrite rule" lines look within a ``VirtualHost`` block. Download a :download:`sample file <../_static/installation/files/etc/httpd/conf.d/dataverse.example.edu.conf>` , edit it to substitute your own hostname under ``ServerName``, and place it at ``/etc/httpd/conf.d/dataverse.example.edu.conf`` or a filename that matches your hostname. The file must be in ``/etc/httpd/conf.d`` and must end in ".conf" to be included in Apache's configuration. @@ -235,7 +235,7 @@ Run semodule Silent is golden. No output is expected. This will place a file in ``/etc/selinux/targeted/modules/active/modules/shibboleth.pp`` and include "shibboleth" in the output of ``semodule -l``. See the ``semodule`` man page if you ever want to remove or disable the module you just added. -Congrats! You've made the creator of http://stopdisablingselinux.com proud. :) +Congrats! 
You've made the creator of https://stopdisablingselinux.com proud. :) Restart Apache and Shibboleth ----------------------------- diff --git a/doc/sphinx-guides/source/style/foundations.rst b/doc/sphinx-guides/source/style/foundations.rst index 31e0c314a05..cc193666868 100755 --- a/doc/sphinx-guides/source/style/foundations.rst +++ b/doc/sphinx-guides/source/style/foundations.rst @@ -9,7 +9,7 @@ Foundation elements are the very basic building blocks to create a page in Datav Grid Layout =========== -`Bootstrap `__ provides a responsive, fluid, 12-column grid system that we use to organize our page layouts. +`Bootstrap `__ provides a responsive, fluid, 12-column grid system that we use to organize our page layouts. We use the fixed-width ``.container`` class which provides responsive widths (i.e. auto, 750px, 970px or 1170px) based on media queries for the page layout, with a series of rows and columns for the content. @@ -42,7 +42,7 @@ The grid layout uses ``.col-sm-*`` classes for horizontal groups of columns, ins Typography ========== -The typeface, text size, and line-height are set in the `Bootstrap CSS `__. We use Bootstrap's global default ``font-size`` of **14px**, with a ``line-height`` of **1.428**, which is applied to the ```` and all paragraphs. +The typeface, text size, and line-height are set in the `Bootstrap CSS `__. We use Bootstrap's global default ``font-size`` of **14px**, with a ``line-height`` of **1.428**, which is applied to the ```` and all paragraphs. .. code-block:: css @@ -57,7 +57,7 @@ The typeface, text size, and line-height are set in the `Bootstrap CSS `__. It provides the background, border, text and link colors used across the application. +The default color palette is set in the `Bootstrap CSS `__. It provides the background, border, text and link colors used across the application. Brand Colors @@ -138,7 +138,7 @@ We use our brand color, a custom burnt orange ``{color:#C55B28;}``, which is set Text Colors ----------- -Text color is the default setting from `Bootstrap CSS `__. +Text color is the default setting from `Bootstrap CSS `__. .. code-block:: css @@ -163,7 +163,7 @@ Text color is the default setting from `Bootstrap CSS `__. The hover state color is set to 15% darker. +Link color is the default setting from `Bootstrap CSS `__. The hover state color is set to 15% darker. **Please note**, there is a CSS override issue with the link color due to the use of both a Bootstrap stylesheet and a PrimeFaces stylesheet in the UI. We've added CSS such as ``.ui-widget-content a {color: #428BCA;}`` to our stylesheet to keep the link color consistent. @@ -204,7 +204,7 @@ Link color is the default setting from `Bootstrap CSS `__ can be used to style background and text colors. Semantic colors include various colors assigned to meaningful contextual values. We convey meaning through color with a handful of emphasis utility classes. +Contextual classes from `Bootstrap CSS `__ can be used to style background and text colors. Semantic colors include various colors assigned to meaningful contextual values. We convey meaning through color with a handful of emphasis utility classes. .. raw:: html @@ -259,7 +259,7 @@ We use various icons across the application, which we get from Bootstrap, FontCu Bootstrap Glyphicons -------------------- -There are over 250 glyphs in font format from the Glyphicon Halflings set provided by `Bootstrap `__. We utilize these mainly as icons inside of buttons and in message blocks. 
+There are over 250 glyphs in font format from the Glyphicon Halflings set provided by `Bootstrap `__. We utilize these mainly as icons inside of buttons and in message blocks. .. raw:: html @@ -305,7 +305,7 @@ The :doc:`/developers/fontcustom` section of the Developer Guide explains how to Socicon Icon Font ----------------- -We use `Socicon `__ for our custom social icons. In the footer we use icons for Twitter and Github. In our Share feature, we also use custom social icons to allow users to select from a list of social media channels. +We use `Socicon `__ for our custom social icons. In the footer we use icons for Twitter and Github. In our Share feature, we also use custom social icons to allow users to select from a list of social media channels. .. raw:: html diff --git a/doc/sphinx-guides/source/style/patterns.rst b/doc/sphinx-guides/source/style/patterns.rst index e96f17dc2ec..c6602ffa26e 100644 --- a/doc/sphinx-guides/source/style/patterns.rst +++ b/doc/sphinx-guides/source/style/patterns.rst @@ -1,7 +1,7 @@ Patterns ++++++++ -Patterns are what emerge when using the foundation elements together with basic objects like buttons and alerts, more complex Javascript components from `Bootstrap `__ like tooltips and dropdowns, and AJAX components from `PrimeFaces `__ like datatables and commandlinks. +Patterns are what emerge when using the foundation elements together with basic objects like buttons and alerts, more complex Javascript components from `Bootstrap `__ like tooltips and dropdowns, and AJAX components from `PrimeFaces `__ like datatables and commandlinks. .. contents:: |toctitle| :local: @@ -9,7 +9,7 @@ Patterns are what emerge when using the foundation elements together with basic Navbar ====== -The `Navbar component `__ from Bootstrap spans the top of the application and contains the logo/branding, aligned to the left, plus search form and links, aligned to the right. +The `Navbar component `__ from Bootstrap spans the top of the application and contains the logo/branding, aligned to the left, plus search form and links, aligned to the right. When logged in, the account name is a dropdown menu, linking the user to account-specific content and the log out link. @@ -74,7 +74,7 @@ When logged in, the account name is a dropdown menu, linking the user to account Breadcrumbs =========== -The breadcrumbs are displayed under the header, and provide a trail of links for users to navigate the hierarchy of containing objects, from file to dataset to Dataverse collection. It utilizes a JSF `repeat component `_ to iterate through the breadcrumbs. +The breadcrumbs are displayed under the header, and provide a trail of links for users to navigate the hierarchy of containing objects, from file to dataset to Dataverse collection. It utilizes a JSF `repeat component `_ to iterate through the breadcrumbs. .. raw:: html @@ -108,7 +108,7 @@ The breadcrumbs are displayed under the header, and provide a trail of links for Tables ====== -Most tables use the `DataTable components `__ from PrimeFaces and are styled using the `Tables component `__ from Bootstrap. +Most tables use the `DataTable components `__ from PrimeFaces and are styled using the `Tables component `__ from Bootstrap. .. raw:: html @@ -187,7 +187,7 @@ Most tables use the `DataTable components `__ from Bootstrap. Form elements like the `InputText component `__ from PrimeFaces are kept looking clean and consistent across each page. +Forms fulfill various functions across the site, but we try to style them consistently. 
We use the ``.form-horizontal`` layout, which uses ``.form-group`` to create a grid of rows for the labels and inputs. The consistent style of forms is maintained using the `Forms component `__ from Bootstrap. Form elements like the `InputText component `__ from PrimeFaces are kept looking clean and consistent across each page. .. raw:: html @@ -289,7 +289,7 @@ Here are additional form elements that are common across many pages, including r Buttons ======= -There are various types of buttons for various actions, so we have many components to use, including the `CommandButton component `__ and `CommandLink component `__ from PrimeFaces, as well as the basic JSF `Link component `__ and `OutputLink component `__. Those are styled using the `Buttons component `__, `Button Groups component `__ and `Buttons Dropdowns component `__ from Bootstrap. +There are various types of buttons for various actions, so we have many components to use, including the `CommandButton component `__ and `CommandLink component `__ from PrimeFaces, as well as the basic JSF `Link component `__ and `OutputLink component `__. Those are styled using the `Buttons component `__, `Button Groups component `__ and `Buttons Dropdowns component `__ from Bootstrap. Action Buttons -------------- @@ -668,7 +668,7 @@ Another variation of icon-only buttons uses the ``.btn-link`` style class from B Pagination ========== -We use the `Pagination component `__ from Bootstrap for paging through search results. +We use the `Pagination component `__ from Bootstrap for paging through search results. .. raw:: html @@ -738,7 +738,7 @@ We use the `Pagination component `__ from Bootstrap is used for publication status (DRAFT, In Review, Unpublished, Deaccessioned), and Dataset version, as well as Tabular Data Tags (Survey, Time Series, Panel, Event, Genomics, Network, Geospatial). +The `Labels component `__ from Bootstrap is used for publication status (DRAFT, In Review, Unpublished, Deaccessioned), and Dataset version, as well as Tabular Data Tags (Survey, Time Series, Panel, Event, Genomics, Network, Geospatial). .. raw:: html @@ -768,7 +768,7 @@ The `Labels component `__ from Boots Alerts ====== -For our help/information, success, warning, and error message blocks we use a custom built UI component based on the `Alerts component `__ from Bootstrap. +For our help/information, success, warning, and error message blocks we use a custom built UI component based on the `Alerts component `__ from Bootstrap. .. raw:: html @@ -859,9 +859,9 @@ Style classes can be added to ``p``, ``div``, ``span`` and other elements to add Images ====== -For images, we use the `GraphicImage component `__ from PrimeFaces, or the basic JSF `GraphicImage component `__. +For images, we use the `GraphicImage component `__ from PrimeFaces, or the basic JSF `GraphicImage component `__. -To display images in a responsive way, they are styled with ``.img-responsive``, an `Images CSS class `__ from Bootstrap. +To display images in a responsive way, they are styled with ``.img-responsive``, an `Images CSS class `__ from Bootstrap. .. raw:: html @@ -879,7 +879,7 @@ To display images in a responsive way, they are styled with ``.img-responsive``, Panels ====== -The most common of our containers, the `Panels component `__ from Bootstrap is used to add a border and padding around sections of content like metadata blocks. Displayed with a header and/or footer, it can also be used with the `Collapse plugin `__ from Bootstrap. 
+The most common of our containers, the `Panels component `__ from Bootstrap is used to add a border and padding around sections of content like metadata blocks. Displayed with a header and/or footer, it can also be used with the `Collapse plugin `__ from Bootstrap. .. raw:: html @@ -943,7 +943,7 @@ Tabs Tabs are used to provide content panes on a page that allow the user to view different sections of content without navigating to a different page. -We use the `TabView component `__ from PrimeFaces, which is styled using the `Tab component `__ from Bootstrap. +We use the `TabView component `__ from PrimeFaces, which is styled using the `Tab component `__ from Bootstrap. .. raw:: html @@ -989,7 +989,7 @@ Modals are dialog prompts that act as popup overlays, but don't create a new bro Buttons usually provide the UI prompt. A user clicks the button, which then opens a `Dialog component `__ or `Confirm Dialog component `__ from PrimeFaces that displays the modal with the necessary information and actions to take. -The modal is styled using the `Modal component `__ from Bootstrap, for a popup window that prompts a user for information, with overlay and a backdrop, then header, content, and buttons. We can use style classes from Bootstrap for large (``.bs-example-modal-lg``) and small (``.bs-example-modal-sm``) width options. +The modal is styled using the `Modal component `__ from Bootstrap, for a popup window that prompts a user for information, with overlay and a backdrop, then header, content, and buttons. We can use style classes from Bootstrap for large (``.bs-example-modal-lg``) and small (``.bs-example-modal-sm``) width options. .. raw:: html diff --git a/doc/sphinx-guides/source/user/account.rst b/doc/sphinx-guides/source/user/account.rst index 12cc54c7fde..792fad730cf 100755 --- a/doc/sphinx-guides/source/user/account.rst +++ b/doc/sphinx-guides/source/user/account.rst @@ -109,7 +109,7 @@ If you are leaving your institution and need to convert your Dataverse installat ORCID Log In ~~~~~~~~~~~~~ -You can set up your Dataverse installation account to allow you to log in using your ORCID credentials. ORCID® is an independent non-profit effort to provide an open registry of unique researcher identifiers and open services to link research activities and organizations to these identifiers. Learn more at `orcid.org `_. +You can set up your Dataverse installation account to allow you to log in using your ORCID credentials. ORCID® is an independent non-profit effort to provide an open registry of unique researcher identifiers and open services to link research activities and organizations to these identifiers. Learn more at `orcid.org `_. Create a Dataverse installation account using ORCID ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ diff --git a/doc/sphinx-guides/source/user/appendix.rst b/doc/sphinx-guides/source/user/appendix.rst index b05459b6aaf..ae0ec37aff3 100755 --- a/doc/sphinx-guides/source/user/appendix.rst +++ b/doc/sphinx-guides/source/user/appendix.rst @@ -22,13 +22,13 @@ Supported Metadata Detailed below are what metadata schemas we support for Citation and Domain Specific Metadata in the Dataverse Project: -- `Citation Metadata `__ (`see .tsv version `__): compliant with `DDI Lite `_, `DDI 2.5 Codebook `__, `DataCite 3.1 `__, and Dublin Core's `DCMI Metadata Terms `__ . Language field uses `ISO 639-1 `__ controlled vocabulary. -- `Geospatial Metadata `__ (`see .tsv version `__): compliant with DDI Lite, DDI 2.5 Codebook, DataCite, and Dublin Core. 
Country / Nation field uses `ISO 3166-1 `_ controlled vocabulary. +- `Citation Metadata `__ (`see .tsv version `__): compliant with `DDI Lite `_, `DDI 2.5 Codebook `__, `DataCite 3.1 `__, and Dublin Core's `DCMI Metadata Terms `__ . Language field uses `ISO 639-1 `__ controlled vocabulary. +- `Geospatial Metadata `__ (`see .tsv version `__): compliant with DDI Lite, DDI 2.5 Codebook, DataCite, and Dublin Core. Country / Nation field uses `ISO 3166-1 `_ controlled vocabulary. - `Social Science & Humanities Metadata `__ (`see .tsv version `__): compliant with DDI Lite, DDI 2.5 Codebook, and Dublin Core. - `Astronomy and Astrophysics Metadata `__ (`see .tsv version `__): These metadata elements can be mapped/exported to the International Virtual Observatory Alliance’s (IVOA) - `VOResource Schema format `__ and is based on - `Virtual Observatory (VO) Discovery and Provenance Metadata `__. -- `Life Sciences Metadata `__ (`see .tsv version `__): based on `ISA-Tab Specification `__, along with controlled vocabulary from subsets of the `OBI Ontology `__ and the `NCBI Taxonomy for Organisms `__. + `VOResource Schema format `__ and is based on + `Virtual Observatory (VO) Discovery and Provenance Metadata `__. +- `Life Sciences Metadata `__ (`see .tsv version `__): based on `ISA-Tab Specification `__, along with controlled vocabulary from subsets of the `OBI Ontology `__ and the `NCBI Taxonomy for Organisms `__. - `Journal Metadata `__ (`see .tsv version `__): based on the `Journal Archiving and Interchange Tag Set, version 1.2 `__. Experimental Metadata diff --git a/doc/sphinx-guides/source/user/dataset-management.rst b/doc/sphinx-guides/source/user/dataset-management.rst index 77a760ef838..a3637154050 100755 --- a/doc/sphinx-guides/source/user/dataset-management.rst +++ b/doc/sphinx-guides/source/user/dataset-management.rst @@ -192,7 +192,7 @@ Additional download options available for tabular data (found in the same drop-d - As tab-delimited data (with the variable names in the first row); - The original file uploaded by the user; - Saved as R data (if the original file was not in R format); -- Variable Metadata (as a `DDI Codebook `_ XML file); +- Variable Metadata (as a `DDI Codebook `_ XML file); - Data File Citation (currently in either RIS, EndNote XML, or BibTeX format); - All of the above, as a zipped bundle. @@ -297,7 +297,7 @@ You can also search for files within datasets that have been tagged as "Workflow Astronomy (FITS) ---------------- -Metadata found in the header section of `Flexible Image Transport System (FITS) files `_ are automatically extracted by the Dataverse Software, aggregated and displayed in the Astronomy Domain-Specific Metadata of the Dataset that the file belongs to. This FITS file metadata, is therefore searchable and browsable (facets) at the Dataset-level. +Metadata found in the header section of `Flexible Image Transport System (FITS) files `_ are automatically extracted by the Dataverse Software, aggregated and displayed in the Astronomy Domain-Specific Metadata of the Dataset that the file belongs to. This FITS file metadata, is therefore searchable and browsable (facets) at the Dataset-level. Compressed Files ---------------- @@ -388,7 +388,7 @@ Choosing a License ------------------ Each Dataverse installation provides a set of license(s) data can be released under, and whether users can specify custom terms instead (see below). 
-One of the available licenses (often the `Creative Commons CC0 Public Domain Dedication `_) serves as the default if you do not make an explicit choice. +One of the available licenses (often the `Creative Commons CC0 Public Domain Dedication `_) serves as the default if you do not make an explicit choice. If you want to apply one of the other available licenses to your dataset, you can change it on the Terms tab of your Dataset page. License Selection and Professional Norms diff --git a/doc/sphinx-guides/source/user/tabulardataingest/ingestprocess.rst b/doc/sphinx-guides/source/user/tabulardataingest/ingestprocess.rst index f1d5611ede9..33ae9b555e6 100644 --- a/doc/sphinx-guides/source/user/tabulardataingest/ingestprocess.rst +++ b/doc/sphinx-guides/source/user/tabulardataingest/ingestprocess.rst @@ -27,7 +27,7 @@ separately, in a relational database, so that it can be accessed efficiently by the application. For the purposes of archival preservation it can be exported, in plain text XML files, using a standardized, open `DDI Codebook -`_ +`_ format. (more info below) @@ -53,6 +53,6 @@ Tabular Metadata in the Dataverse Software The structure of the metadata defining tabular data variables used in the Dataverse Software was originally based on the `DDI Codebook -`_ format. +`_ format. You can see an example of DDI output under the :ref:`data-variable-metadata-access` section of the :doc:`/api/dataaccess` section of the API Guide. From 044ed40c17e1fa5fdbb7c8745a4671add25414c0 Mon Sep 17 00:00:00 2001 From: bencomp Date: Tue, 18 Oct 2022 00:32:55 +0200 Subject: [PATCH 0007/1525] Align table boundary in SWORD doc --- doc/sphinx-guides/source/api/sword.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/sphinx-guides/source/api/sword.rst b/doc/sphinx-guides/source/api/sword.rst index c9ac83bc204..51391784bde 100755 --- a/doc/sphinx-guides/source/api/sword.rst +++ b/doc/sphinx-guides/source/api/sword.rst @@ -127,7 +127,7 @@ Dublin Core Terms (DC Terms) Qualified Mapping - Dataverse Project DB Element Cr +-----------------------------+----------------------------------------------+--------------+-------------------------------------------------------------------------------------------------------------------------------------------------------------+ |dcterms:creator | authorName (LastName, FirstName) | Y | Author(s) for the Dataset. | +-----------------------------+----------------------------------------------+--------------+-------------------------------------------------------------------------------------------------------------------------------------------------------------+ -|dcterms:subject | subject (Controlled Vocabulary) OR keyword | Y | Controlled Vocabulary list is in our User Guide > `Metadata References `_. | +|dcterms:subject | subject (Controlled Vocabulary) OR keyword | Y | Controlled Vocabulary list is in our User Guide > `Metadata References `_. | +-----------------------------+----------------------------------------------+--------------+-------------------------------------------------------------------------------------------------------------------------------------------------------------+ |dcterms:description | dsDescriptionValue | Y | Describing the purpose, scope or nature of the Dataset. Can also use dcterms:abstract. 
| +-----------------------------+----------------------------------------------+--------------+-------------------------------------------------------------------------------------------------------------------------------------------------------------+ From cbc42d5052f8a9afc30121082a44c128387e2023 Mon Sep 17 00:00:00 2001 From: Eryk Kulikowski Date: Tue, 8 Nov 2022 14:07:32 +0100 Subject: [PATCH 0008/1525] renamed and moved the direct upload JVM option in the documentation --- doc/release-notes/4.20-release-notes.md | 7 +------ doc/sphinx-guides/source/installation/config.rst | 7 ++++++- .../edu/harvard/iq/dataverse/dataaccess/StorageIO.java | 2 +- src/main/java/edu/harvard/iq/dataverse/util/FileUtil.java | 2 +- .../java/edu/harvard/iq/dataverse/util/SystemConfig.java | 2 +- 5 files changed, 10 insertions(+), 10 deletions(-) diff --git a/doc/release-notes/4.20-release-notes.md b/doc/release-notes/4.20-release-notes.md index 79037d8cd8c..ec52b638274 100644 --- a/doc/release-notes/4.20-release-notes.md +++ b/doc/release-notes/4.20-release-notes.md @@ -90,14 +90,10 @@ Also note that the :MaxFileUploadSizeInBytes property has a new option to provid ### Direct S3 Upload Changes -Direct upload to S3 in UI and API is enabled per store by one new jvm option: +Direct upload to S3 is enabled per store by one new jvm option: ./asadmin create-jvm-options "\-Ddataverse.files..upload-redirect=true" -This option makes direct upload the default in the UI. In the API, you can use either: direct upload or upload via Dataverse upload. Direct upload to S3 in API only is enabled per store by this new jvm option: - - ./asadmin create-jvm-options "\-Ddataverse.files..api-direct-upload=true" - That option leaves via Dataverse upload by default in UI, but makes both: uploads via Dataverse and direct uploads possible via API. The existing :MaxFileUploadSizeInBytes property and ```dataverse.files..url-expiration-minutes``` jvm option for the same store also apply to direct upload. @@ -133,7 +129,6 @@ We made changes to the JSON Export in this release (Issue 6650, PR #6669). If yo - The JVM option dataverse.files.file.directory= controls where temporary files are stored (in the /temp subdir of the defined directory), independent of the location of any 'file' store defined above. - The JVM option dataverse.files..upload-redirect enables direct upload of files added to a dataset in the S3 bucket. (S3 stores only!) -- The JVM option dataverse.files..api-direct-upload enables direct upload of files added to a dataset in any storage. (Via API only and when the uploading tool has direct access to the relevant storage used; i.e., upload the file first and register it via API!) - The JVM option dataverse.files..MaxFileUploadSizeInBytes controls the maximum size of file uploads allowed for the given file store. - The JVM option dataverse.files..ingestsizelimit controls the maximum size of files for which ingest will be attempted, for the given file store. diff --git a/doc/sphinx-guides/source/installation/config.rst b/doc/sphinx-guides/source/installation/config.rst index 2e68bfaa1ab..4f15ad81190 100644 --- a/doc/sphinx-guides/source/installation/config.rst +++ b/doc/sphinx-guides/source/installation/config.rst @@ -249,6 +249,12 @@ A Dataverse installation can alternately store files in a Swift or S3-compatible A Dataverse installation may also be configured to reference some files (e.g. large and/or sensitive data) stored in a web-accessible trusted remote store. 
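The interplay between the two per-store JVM options described in the paragraph added below reduces to a simple OR: either option opens the out-of-band upload API for a store. A minimal sketch of that check, mirroring the StorageIO.isDirectUploadEnabled() change further down in this patch (the method name here is illustrative):

    // Either option opens the datasets/{id}/add and datasets/{id}/addFiles endpoints
    // for the store; upload-redirect additionally switches the UI to direct S3 upload.
    static boolean outOfBandUploadAllowed(String driverId) {
        return Boolean.parseBoolean(System.getProperty("dataverse.files." + driverId + ".upload-redirect"))
                || Boolean.parseBoolean(System.getProperty("dataverse.files." + driverId + ".allow-out-of-band-upload"));
    }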
+When using integration tools, a Dataverse installation can be configured to allow out-of-band upload by setting the ``dataverse.files.\.allow-out-of-band-upload`` JVM option to ``true``.
+Files can then be uploaded by an integration tool with the ``datasets/{id}/add`` API call, or uploaded directly to the storage and registered in a dataset afterwards using the ``datasets/{id}/addFiles`` API call.
+Note that using S3 storage with the ``dataverse.files.\.upload-redirect`` JVM option enabled supersedes ``allow-out-of-band-upload`` and will enable direct upload even with ``allow-out-of-band-upload`` unset (or set to ``false``).
+In other words, the ``dataverse.files.\.allow-out-of-band-upload`` option opens the ``datasets/{id}/add`` and ``datasets/{id}/addFiles`` API endpoints without redirecting uploads in the UI.
+Enabling the ``upload-redirect`` option then enables direct upload automatically, with no need to enable ``allow-out-of-band-upload`` (setting it to ``false`` has no effect in that case).
+
 The following sections describe how to set up various types of stores and how to configure for multiple stores.
 
 Multi-store Basics
@@ -546,7 +552,6 @@ List of S3 Storage Options
     dataverse.files..bucket-name    The bucket name. See above.    (none)
     dataverse.files..download-redirect    ``true``/``false``    Enable direct download or proxy through Dataverse.    ``false``
     dataverse.files..upload-redirect    ``true``/``false``    Enable direct upload of files added to a dataset in the S3 store.    ``false``
-    dataverse.files..api-direct-upload    ``true``/``false``    Enable direct upload of files added to a dataset via API only.    ``false``
     dataverse.files..ingestsizelimit    Maximum size of directupload files that should be ingested    (none)
     dataverse.files..url-expiration-minutes    If direct uploads/downloads: time until links expire. Optional.    60
     dataverse.files..min-part-size    Multipart direct uploads will occur for files larger than this. Optional.    ``1024**3``
diff --git a/src/main/java/edu/harvard/iq/dataverse/dataaccess/StorageIO.java b/src/main/java/edu/harvard/iq/dataverse/dataaccess/StorageIO.java
index 8e2dd9fa961..a2ff546ef0a 100644
--- a/src/main/java/edu/harvard/iq/dataverse/dataaccess/StorageIO.java
+++ b/src/main/java/edu/harvard/iq/dataverse/dataaccess/StorageIO.java
@@ -606,7 +606,7 @@ public static String getDriverPrefix(String driverId) {
     
     public static boolean isDirectUploadEnabled(String driverId) {
         return Boolean.parseBoolean(System.getProperty("dataverse.files." + driverId + ".upload-redirect")) ||
-                Boolean.parseBoolean(System.getProperty("dataverse.files." + driverId + ".api-direct-upload"));
+                Boolean.parseBoolean(System.getProperty("dataverse.files." + driverId + ".allow-out-of-band-upload"));
     }
     
     //Check that storageIdentifier is consistent with store's config
diff --git a/src/main/java/edu/harvard/iq/dataverse/util/FileUtil.java b/src/main/java/edu/harvard/iq/dataverse/util/FileUtil.java
index 0a41da4f7dd..db82df72b8a 100644
--- a/src/main/java/edu/harvard/iq/dataverse/util/FileUtil.java
+++ b/src/main/java/edu/harvard/iq/dataverse/util/FileUtil.java
@@ -1749,7 +1749,7 @@ public static boolean isPackageFile(DataFile dataFile) {
     public static S3AccessIO getS3AccessForDirectUpload(Dataset dataset) {
         String driverId = dataset.getEffectiveStorageDriverId();
         boolean directEnabled = Boolean.parseBoolean(System.getProperty("dataverse.files." + driverId + ".upload-redirect")) ||
-                Boolean.parseBoolean(System.getProperty("dataverse.files."
+ driverId + ".api-direct-upload")); + Boolean.parseBoolean(System.getProperty("dataverse.files." + driverId + ".allow-out-of-band-upload")); //Should only be requested when it is allowed, but we'll log a warning otherwise if(!directEnabled) { logger.warning("Direct upload not supported for files in this dataset: " + dataset.getId()); diff --git a/src/main/java/edu/harvard/iq/dataverse/util/SystemConfig.java b/src/main/java/edu/harvard/iq/dataverse/util/SystemConfig.java index 4585d99a01f..b040f557895 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/SystemConfig.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/SystemConfig.java @@ -1112,7 +1112,7 @@ public boolean isDatafileValidationOnPublishEnabled() { } public boolean directUploadEnabled(DvObjectContainer container) { - // this method is used in UI only, therfore "dataverse.files." + driverId + ".api-direct-upload" is not used here + // this method is used in UI only, therfore "dataverse.files." + driverId + ".allow-out-of-band-upload" is not used here return Boolean.parseBoolean(System.getProperty("dataverse.files." + container.getEffectiveStorageDriverId() + ".upload-redirect")); } From 4abac1ac15d77f2f059977254971cf4be0f3f1f1 Mon Sep 17 00:00:00 2001 From: Eryk Kulikowski Date: Thu, 17 Nov 2022 09:40:34 +0100 Subject: [PATCH 0009/1525] revert by accident editted old release notes --- doc/release-notes/4.20-release-notes.md | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/doc/release-notes/4.20-release-notes.md b/doc/release-notes/4.20-release-notes.md index ec52b638274..e29953db101 100644 --- a/doc/release-notes/4.20-release-notes.md +++ b/doc/release-notes/4.20-release-notes.md @@ -93,9 +93,7 @@ Also note that the :MaxFileUploadSizeInBytes property has a new option to provid Direct upload to S3 is enabled per store by one new jvm option: ./asadmin create-jvm-options "\-Ddataverse.files..upload-redirect=true" - -That option leaves via Dataverse upload by default in UI, but makes both: uploads via Dataverse and direct uploads possible via API. - + The existing :MaxFileUploadSizeInBytes property and ```dataverse.files..url-expiration-minutes``` jvm option for the same store also apply to direct upload. Direct upload via the Dataverse web interface is transparent to the user and handled automatically by the browser. Some minor differences in file upload exist: directly uploaded files are not unzipped and Dataverse does not scan their content to help in assigning a MIME type. Ingest of tabular files and metadata extraction from FITS files will occur, but can be turned off for files above a specified size limit through the new dataverse.files..ingestsizelimit jvm option. @@ -128,7 +126,7 @@ We made changes to the JSON Export in this release (Issue 6650, PR #6669). If yo ## New JVM Options for file storage drivers - The JVM option dataverse.files.file.directory= controls where temporary files are stored (in the /temp subdir of the defined directory), independent of the location of any 'file' store defined above. -- The JVM option dataverse.files..upload-redirect enables direct upload of files added to a dataset in the S3 bucket. (S3 stores only!) +- The JVM option dataverse.files..upload-redirect enables direct upload of files added to a dataset to the S3 bucket. (S3 stores only!) - The JVM option dataverse.files..MaxFileUploadSizeInBytes controls the maximum size of file uploads allowed for the given file store. 
- The JVM option dataverse.files..ingestsizelimit controls the maximum size of files for which ingest will be attempted, for the given file store. From 578c7af84e7cd1eac52901643d9bb49bc878cfa3 Mon Sep 17 00:00:00 2001 From: Eryk Kulikowski Date: Thu, 17 Nov 2022 10:05:20 +0100 Subject: [PATCH 0010/1525] indentation fixes --- .../iq/dataverse/util/SystemConfig.java | 407 +++++++++--------- 1 file changed, 205 insertions(+), 202 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/util/SystemConfig.java b/src/main/java/edu/harvard/iq/dataverse/util/SystemConfig.java index b040f557895..f3d8e46b004 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/SystemConfig.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/SystemConfig.java @@ -59,8 +59,8 @@ public class SystemConfig { @EJB AuthenticationServiceBean authenticationService; - - public static final String DATAVERSE_PATH = "/dataverse/"; + + public static final String DATAVERSE_PATH = "/dataverse/"; /** * A JVM option for the advertised fully qualified domain name (hostname) of @@ -70,11 +70,11 @@ public class SystemConfig { * The equivalent in DVN 3.x was "dvn.inetAddress". */ public static final String FQDN = "dataverse.fqdn"; - + /** * A JVM option for specifying the "official" URL of the site. - * Unlike the FQDN option above, this would be a complete URL, - * with the protocol, port number etc. + * Unlike the FQDN option above, this would be a complete URL, + * with the protocol, port number etc. */ public static final String SITE_URL = "dataverse.siteUrl"; @@ -102,41 +102,41 @@ public class SystemConfig { private String saneDefaultForSolrHostColonPort = "localhost:8983"; /** - * The default number of datafiles that we allow to be created through + * The default number of datafiles that we allow to be created through * zip file upload. */ - private static final int defaultZipUploadFilesLimit = 1000; + private static final int defaultZipUploadFilesLimit = 1000; public static final long defaultZipDownloadLimit = 104857600L; // 100MB private static final int defaultMultipleUploadFilesLimit = 1000; private static final int defaultLoginSessionTimeout = 480; // = 8 hours - private static String appVersionString = null; - private static String buildNumberString = null; - + private static String appVersionString = null; + private static String buildNumberString = null; + private static final String JVM_TIMER_SERVER_OPTION = "dataverse.timerServer"; - - private static final long DEFAULT_GUESTBOOK_RESPONSES_DISPLAY_LIMIT = 5000L; + + private static final long DEFAULT_GUESTBOOK_RESPONSES_DISPLAY_LIMIT = 5000L; private static final long DEFAULT_THUMBNAIL_SIZE_LIMIT_IMAGE = 3000000L; // 3 MB private static final long DEFAULT_THUMBNAIL_SIZE_LIMIT_PDF = 1000000L; // 1 MB - + public final static String DEFAULTCURATIONLABELSET = "DEFAULT"; public final static String CURATIONLABELSDISABLED = "DISABLED"; - + public String getVersion() { return getVersion(false); } - + // The return value is a "prviate static String", that should be initialized - // once, on the first call (see the code below)... But this is a @Stateless - // bean... so that would mean "once per thread"? - this would be a prime + // once, on the first call (see the code below)... But this is a @Stateless + // bean... so that would mean "once per thread"? - this would be a prime // candidate for being moved into some kind of an application-scoped caching // service... some CachingService @Singleton - ? (L.A. 
5.8) public String getVersion(boolean withBuildNumber) { - + if (appVersionString == null) { // The Version Number is no longer supplied in a .properties file - so - // we can't just do + // we can't just do // return BundleUtil.getStringFromBundle("version.number", null, ResourceBundle.getBundle("VersionNumber", Locale.US)); // // Instead, we'll rely on Maven placing the version number into the @@ -144,31 +144,31 @@ public String getVersion(boolean withBuildNumber) { // (this is considered a better practice, and will also allow us // to maintain this number in only one place - the pom.xml file) // -- L.A. 4.0.2 - - // One would assume, that once the version is in the MANIFEST.MF, - // as Implementation-Version:, it would be possible to obtain - // said version simply as + + // One would assume, that once the version is in the MANIFEST.MF, + // as Implementation-Version:, it would be possible to obtain + // said version simply as // appVersionString = getClass().getPackage().getImplementationVersion(); - // alas - that's not working, for whatever reason. (perhaps that's + // alas - that's not working, for whatever reason. (perhaps that's // only how it works with jar-ed packages; not with .war files). - // People on the interwebs suggest that one should instead - // open the Manifest as a resource, then extract its attributes. - // There were some complications with that too. Plus, relying solely - // on the MANIFEST.MF would NOT work for those of the developers who - // are using "in place deployment" (i.e., where - // Netbeans runs their builds directly from the local target - // directory, bypassing the war file deployment; and the Manifest - // is only available in the .war file). For that reason, I am - // going to rely on the pom.properties file, and use java.util.Properties + // People on the interwebs suggest that one should instead + // open the Manifest as a resource, then extract its attributes. + // There were some complications with that too. Plus, relying solely + // on the MANIFEST.MF would NOT work for those of the developers who + // are using "in place deployment" (i.e., where + // Netbeans runs their builds directly from the local target + // directory, bypassing the war file deployment; and the Manifest + // is only available in the .war file). For that reason, I am + // going to rely on the pom.properties file, and use java.util.Properties // to read it. We have to look for this file in 2 different places - // depending on whether this is a .war file deployment, or a + // depending on whether this is a .war file deployment, or a // developers build. (the app-level META-INF is only populated when - // a .war file is built; the "maven-archiver" directory, on the other + // a .war file is built; the "maven-archiver" directory, on the other // hand, is only available when it's a local build deployment). - // So, long story short, I'm resorting to the convoluted steps below. - // It may look hacky, but it should actually be pretty solid and - // reliable. - + // So, long story short, I'm resorting to the convoluted steps below. + // It may look hacky, but it should actually be pretty solid and + // reliable. 
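        // A condensed sketch of the approach the comments above describe: plain
        // java.util.Properties with a two-location fallback (the two path
        // variables below are illustrative placeholders, not fields of this class):
        //
        //     Properties mvn = new Properties();
        //     String[] candidates = {
        //             warRootPath + "META-INF/maven/edu.harvard.iq/dataverse/pom.properties",
        //             sourceTreePath + "maven-archiver/pom.properties" };
        //     for (String candidate : candidates) {
        //         try (InputStream in = new FileInputStream(candidate)) {
        //             mvn.load(in);
        //             break; // first readable copy wins
        //         } catch (IOException ignored) {
        //             // fall through to the next candidate location
        //         }
        //     }
        //     String version = mvn.getProperty("version");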
+ // First, find the absolute path url of the application persistence file // always supplied with the Dataverse app: @@ -180,46 +180,46 @@ public String getVersion(boolean withBuildNumber) { filePath = fileUrl.getFile(); if (filePath != null) { InputStream mavenPropertiesInputStream = null; - String mavenPropertiesFilePath; + String mavenPropertiesFilePath; Properties mavenProperties = new Properties(); filePath = filePath.replaceFirst("/[^/]*$", "/"); - // Using a relative path, find the location of the maven pom.properties file. - // First, try to look for it in the app-level META-INF. This will only be - // available if it's a war file deployment: + // Using a relative path, find the location of the maven pom.properties file. + // First, try to look for it in the app-level META-INF. This will only be + // available if it's a war file deployment: mavenPropertiesFilePath = filePath.concat("../../../META-INF/maven/edu.harvard.iq/dataverse/pom.properties"); - + try { mavenPropertiesInputStream = new FileInputStream(mavenPropertiesFilePath); } catch (IOException ioex) { - // OK, let's hope this is a local dev. build. - // In that case the properties file should be available in - // the maven-archiver directory: - + // OK, let's hope this is a local dev. build. + // In that case the properties file should be available in + // the maven-archiver directory: + mavenPropertiesFilePath = filePath.concat("../../../../maven-archiver/pom.properties"); - - // try again: - + + // try again: + try { mavenPropertiesInputStream = new FileInputStream(mavenPropertiesFilePath); } catch (IOException ioex2) { logger.warning("Failed to find and/or open for reading the pom.properties file."); - mavenPropertiesInputStream = null; + mavenPropertiesInputStream = null; } } - + if (mavenPropertiesInputStream != null) { try { mavenProperties.load(mavenPropertiesInputStream); - appVersionString = mavenProperties.getProperty("version"); + appVersionString = mavenProperties.getProperty("version"); } catch (IOException ioex) { logger.warning("caught IOException trying to read and parse the pom properties file."); } finally { IOUtils.closeQuietly(mavenPropertiesInputStream); } } - + } else { logger.warning("Null file path representation of the location of persistence.xml in the webapp root directory!"); } @@ -229,53 +229,54 @@ public String getVersion(boolean withBuildNumber) { if (appVersionString == null) { - // still null? - defaulting to 4.0: + // still null? 
- defaulting to 4.0: appVersionString = "4.0"; } } - + if (withBuildNumber) { if (buildNumberString == null) { - // (build number is still in a .properties file in the source tree; it only - // contains a real build number if this war file was built by - // Jenkins) - + // (build number is still in a .properties file in the source tree; it only + // contains a real build number if this war file was built by + // Jenkins) + try { buildNumberString = ResourceBundle.getBundle("BuildNumber").getString("build.number"); } catch (MissingResourceException ex) { - buildNumberString = null; + buildNumberString = null; } } - + if (buildNumberString != null && !buildNumberString.equals("")) { - return appVersionString + " build " + buildNumberString; - } - } - - return appVersionString; + return appVersionString + " build " + buildNumberString; + } + } + + return appVersionString; } public String getSolrHostColonPort() { String SolrHost; if ( System.getenv("SOLR_SERVICE_HOST") != null && System.getenv("SOLR_SERVICE_HOST") != ""){ SolrHost = System.getenv("SOLR_SERVICE_HOST"); + } else { + SolrHost = saneDefaultForSolrHostColonPort; } - else SolrHost = saneDefaultForSolrHostColonPort; String solrHostColonPort = settingsService.getValueForKey(SettingsServiceBean.Key.SolrHostColonPort, SolrHost); return solrHostColonPort; } public boolean isProvCollectionEnabled() { String provCollectionEnabled = settingsService.getValueForKey(SettingsServiceBean.Key.ProvCollectionEnabled, null); - if("true".equalsIgnoreCase(provCollectionEnabled)){ + if ("true".equalsIgnoreCase(provCollectionEnabled)) { return true; } return false; } - + public int getMetricsCacheTimeoutMinutes() { - int defaultValue = 10080; //one week in minutes + int defaultValue = 10080; // one week in minutes SettingsServiceBean.Key key = SettingsServiceBean.Key.MetricsCacheTimeoutMinutes; String metricsCacheTimeString = settingsService.getValueForKey(key); if (metricsCacheTimeString != null) { @@ -293,7 +294,7 @@ public int getMetricsCacheTimeoutMinutes() { } return defaultValue; } - + public int getMinutesUntilConfirmEmailTokenExpires() { final int minutesInOneDay = 1440; final int reasonableDefault = minutesInOneDay; @@ -338,10 +339,10 @@ public static int getMinutesUntilPasswordResetTokenExpires() { } return reasonableDefault; } - + /** * The "official", designated URL of the site; - * can be defined as a complete URL; or derived from the + * can be defined as a complete URL; or derived from the * "official" hostname. If none of these options is set, * defaults to the InetAddress.getLocalHOst() and https; * These are legacy JVM options. 
Will be eventualy replaced @@ -350,7 +351,7 @@ public static int getMinutesUntilPasswordResetTokenExpires() { public String getDataverseSiteUrl() { return getDataverseSiteUrlStatic(); } - + public static String getDataverseSiteUrlStatic() { String hostUrl = System.getProperty(SITE_URL); if (hostUrl != null && !"".equals(hostUrl)) { @@ -367,19 +368,20 @@ public static String getDataverseSiteUrlStatic() { hostUrl = "https://" + hostName; return hostUrl; } - + /** - * URL Tracking: + * URL Tracking: */ public String getPageURLWithQueryString() { - return PrettyContext.getCurrentInstance().getRequestURL().toURL() + PrettyContext.getCurrentInstance().getRequestQueryString().toQueryString(); + return PrettyContext.getCurrentInstance().getRequestURL().toURL() + + PrettyContext.getCurrentInstance().getRequestQueryString().toQueryString(); } /** - * The "official" server's fully-qualified domain name: + * The "official" server's fully-qualified domain name: */ public String getDataverseServer() { - // still reliese on a JVM option: + // still reliese on a JVM option: String fqdn = System.getProperty(FQDN); if (fqdn == null) { try { @@ -447,44 +449,44 @@ public static int getIntLimitFromStringOrDefault(String limitSetting, Integer de /** * Download-as-zip size limit. - * returns defaultZipDownloadLimit if not specified; - * set to -1 to disable zip downloads. + * returns defaultZipDownloadLimit if not specified; + * set to -1 to disable zip downloads. */ public long getZipDownloadLimit() { String zipLimitOption = settingsService.getValueForKey(SettingsServiceBean.Key.ZipDownloadLimit); return getLongLimitFromStringOrDefault(zipLimitOption, defaultZipDownloadLimit); } - + public int getZipUploadFilesLimit() { String limitOption = settingsService.getValueForKey(SettingsServiceBean.Key.ZipUploadFilesLimit); return getIntLimitFromStringOrDefault(limitOption, defaultZipUploadFilesLimit); } - + /** - * Session timeout, in minutes. + * Session timeout, in minutes. 
* (default value provided) */ public int getLoginSessionTimeout() { return getIntLimitFromStringOrDefault( - settingsService.getValueForKey(SettingsServiceBean.Key.LoginSessionTimeout), - defaultLoginSessionTimeout); + settingsService.getValueForKey(SettingsServiceBean.Key.LoginSessionTimeout), + defaultLoginSessionTimeout); } - + /* ` the number of files the GUI user is allowed to upload in one batch, via drag-and-drop, or through the file select dialog - */ + */ public int getMultipleUploadFilesLimit() { String limitOption = settingsService.getValueForKey(SettingsServiceBean.Key.MultipleUploadFilesLimit); return getIntLimitFromStringOrDefault(limitOption, defaultMultipleUploadFilesLimit); } - + public long getGuestbookResponsesPageDisplayLimit() { String limitSetting = settingsService.getValueForKey(SettingsServiceBean.Key.GuestbookResponsesPageDisplayLimit); return getLongLimitFromStringOrDefault(limitSetting, DEFAULT_GUESTBOOK_RESPONSES_DISPLAY_LIMIT); } - - public long getUploadLogoSizeLimit(){ + + public long getUploadLogoSizeLimit() { return 500000; } @@ -497,10 +499,10 @@ public long getThumbnailSizeLimitPDF() { } public static long getThumbnailSizeLimit(String type) { - String option = null; - - //get options via jvm options - + String option = null; + + // get options via jvm options + if ("Image".equals(type)) { option = System.getProperty("dataverse.dataAccess.thumbnail.image.limit"); return getLongLimitFromStringOrDefault(option, DEFAULT_THUMBNAIL_SIZE_LIMIT_IMAGE); @@ -512,19 +514,19 @@ public static long getThumbnailSizeLimit(String type) { // Zero (0) means no limit. return getLongLimitFromStringOrDefault(option, 0L); } - + public boolean isThumbnailGenerationDisabledForType(String type) { return getThumbnailSizeLimit(type) == -1l; } - + public boolean isThumbnailGenerationDisabledForImages() { return isThumbnailGenerationDisabledForType("Image"); } - + public boolean isThumbnailGenerationDisabledForPDF() { return isThumbnailGenerationDisabledForType("PDF"); } - + public String getApplicationTermsOfUse() { String language = BundleUtil.getCurrentLocale().getLanguage(); String saneDefaultForAppTermsOfUse = BundleUtil.getStringFromBundle("system.app.terms"); @@ -532,9 +534,9 @@ public String getApplicationTermsOfUse() { // value, or as a better default than the saneDefaultForAppTermsOfUse if there // is no language-specific value String appTermsOfUse = settingsService.getValueForKey(SettingsServiceBean.Key.ApplicationTermsOfUse, saneDefaultForAppTermsOfUse); - //Now get the language-specific value if it exists + // Now get the language-specific value if it exists if (language != null && !language.equalsIgnoreCase(BundleUtil.getDefaultLocale().getLanguage())) { - appTermsOfUse = settingsService.getValueForKey(SettingsServiceBean.Key.ApplicationTermsOfUse, language, appTermsOfUse); + appTermsOfUse = settingsService.getValueForKey(SettingsServiceBean.Key.ApplicationTermsOfUse, language, appTermsOfUse); } return appTermsOfUse; } @@ -545,7 +547,7 @@ public String getApiTermsOfUse() { return apiTermsOfUse; } - // TODO: + // TODO: // remove this method! // pages should be using settingsWrapper.get(":ApplicationPrivacyPolicyUrl") instead. 
-- 4.2.1 public String getApplicationPrivacyPolicyUrl() { @@ -564,10 +566,10 @@ public boolean isFilesOnDatasetPageFromSolr() { return settingsService.isTrueForKey(SettingsServiceBean.Key.FilesOnDatasetPageFromSolr, safeDefaultIfKeyNotFound); } - public Long getMaxFileUploadSizeForStore(String driverId){ - return settingsService.getValueForCompoundKeyAsLong(SettingsServiceBean.Key.MaxFileUploadSizeInBytes, driverId); - } - + public Long getMaxFileUploadSizeForStore(String driverId) { + return settingsService.getValueForCompoundKeyAsLong(SettingsServiceBean.Key.MaxFileUploadSizeInBytes, driverId); + } + public Integer getSearchHighlightFragmentSize() { String fragSize = settingsService.getValueForKey(SettingsServiceBean.Key.SearchHighlightFragmentSize); if (fragSize != null) { @@ -581,12 +583,12 @@ public Integer getSearchHighlightFragmentSize() { } public long getTabularIngestSizeLimit() { - // This method will return the blanket ingestable size limit, if - // set on the system. I.e., the universal limit that applies to all - // tabular ingests, regardless of fromat: - - String limitEntry = settingsService.getValueForKey(SettingsServiceBean.Key.TabularIngestSizeLimit); - + // This method will return the blanket ingestable size limit, if + // set on the system. I.e., the universal limit that applies to all + // tabular ingests, regardless of fromat: + + String limitEntry = settingsService.getValueForKey(SettingsServiceBean.Key.TabularIngestSizeLimit); + if (limitEntry != null) { try { Long sizeOption = new Long(limitEntry); @@ -595,48 +597,48 @@ public long getTabularIngestSizeLimit() { logger.warning("Invalid value for TabularIngestSizeLimit option? - " + limitEntry); } } - // -1 means no limit is set; - // 0 on the other hand would mean that ingest is fully disabled for - // tabular data. - return -1; + // -1 means no limit is set; + // 0 on the other hand would mean that ingest is fully disabled for + // tabular data. + return -1; } - + public long getTabularIngestSizeLimit(String formatName) { // This method returns the size limit set specifically for this format name, - // if available, otherwise - the blanket limit that applies to all tabular - // ingests regardless of a format. - + // if available, otherwise - the blanket limit that applies to all tabular + // ingests regardless of a format. + if (formatName == null || formatName.equals("")) { - return getTabularIngestSizeLimit(); + return getTabularIngestSizeLimit(); } - + String limitEntry = settingsService.get(SettingsServiceBean.Key.TabularIngestSizeLimit.toString() + ":" + formatName); - + if (limitEntry != null) { try { Long sizeOption = new Long(limitEntry); return sizeOption; } catch (NumberFormatException nfe) { - logger.warning("Invalid value for TabularIngestSizeLimit:" + formatName + "? - " + limitEntry ); + logger.warning("Invalid value for TabularIngestSizeLimit:" + formatName + "? 
- " + limitEntry); } } - - return getTabularIngestSizeLimit(); + + return getTabularIngestSizeLimit(); } public boolean isOAIServerEnabled() { boolean defaultResponse = false; return settingsService.isTrueForKey(SettingsServiceBean.Key.OAIServerEnabled, defaultResponse); } - + public void enableOAIServer() { settingsService.setValueForKey(SettingsServiceBean.Key.OAIServerEnabled, "true"); } - + public void disableOAIServer() { settingsService.deleteValueForKey(SettingsServiceBean.Key.OAIServerEnabled); - } - + } + public boolean isTimerServer() { String optionValue = System.getProperty(JVM_TIMER_SERVER_OPTION); if ("true".equalsIgnoreCase(optionValue)) { @@ -704,11 +706,12 @@ public String getOAuth2CallbackUrl() { } return saneDefault; } - + public boolean isShibPassiveLoginEnabled() { boolean defaultResponse = false; return settingsService.isTrueForKey(SettingsServiceBean.Key.ShibPassiveLoginEnabled, defaultResponse); } + public boolean isShibAttributeCharacterSetConversionEnabled() { boolean defaultResponse = true; return settingsService.isTrueForKey(SettingsServiceBean.Key.ShibAttributeCharacterSetConversionEnabled, defaultResponse); @@ -734,7 +737,7 @@ public String getPVDictionaries() { public int getPVGoodStrength() { // FIXME: Change this to 21 to match Harvard's requirements or implement a way to disable the rule (0 or -1) and have the default be disabled. int goodStrengthLength = 20; - //String _goodStrengthLength = System.getProperty("pv.goodstrength", settingsService.get(SettingsServiceBean.Key.PVGoodStrength.toString())); + // String _goodStrengthLength = System.getProperty("pv.goodstrength", settingsService.get(SettingsServiceBean.Key.PVGoodStrength.toString())); String _goodStrengthLength = settingsService.get(SettingsServiceBean.Key.PVGoodStrength.toString()); try { goodStrengthLength = Integer.parseInt(_goodStrengthLength); @@ -862,9 +865,7 @@ public enum FileUploadMethods { * Upload through Globus of large files */ - GLOBUS("globus") - ; - + GLOBUS("globus"); private final String text; @@ -887,8 +888,7 @@ public static FileUploadMethods fromString(String text) { public String toString() { return text; } - - + } /** @@ -904,8 +904,8 @@ public enum FileDownloadMethods { */ RSYNC("rsal/rsync"), NATIVE("native/http"), - GLOBUS("globus") - ; + GLOBUS("globus"); + private final String text; private FileDownloadMethods(final String text) { @@ -927,27 +927,28 @@ public static FileUploadMethods fromString(String text) { public String toString() { return text; } - + } - + public enum DataFilePIDFormat { DEPENDENT("DEPENDENT"), INDEPENDENT("INDEPENDENT"); + private final String text; public String getText() { return text; } - - private DataFilePIDFormat(final String text){ + + private DataFilePIDFormat(final String text) { this.text = text; } - + @Override public String toString() { return text; } - + } /** @@ -987,44 +988,44 @@ public String toString() { } - public boolean isPublicInstall(){ + public boolean isPublicInstall() { boolean saneDefault = false; return settingsService.isTrueForKey(SettingsServiceBean.Key.PublicInstall, saneDefault); } - - public boolean isRsyncUpload(){ + + public boolean isRsyncUpload() { return getMethodAvailable(SystemConfig.FileUploadMethods.RSYNC.toString(), true); } - public boolean isGlobusUpload(){ + public boolean isGlobusUpload() { return getMethodAvailable(FileUploadMethods.GLOBUS.toString(), true); } // Controls if HTTP upload is enabled for both GUI and API. 
- public boolean isHTTPUpload(){ + public boolean isHTTPUpload() { return getMethodAvailable(SystemConfig.FileUploadMethods.NATIVE.toString(), true); } - - public boolean isRsyncOnly(){ + + public boolean isRsyncOnly() { String downloadMethods = settingsService.getValueForKey(SettingsServiceBean.Key.DownloadMethods); - if(downloadMethods == null){ + if (downloadMethods == null) { return false; } - if (!downloadMethods.toLowerCase().equals(SystemConfig.FileDownloadMethods.RSYNC.toString())){ + if (!downloadMethods.toLowerCase().equals(SystemConfig.FileDownloadMethods.RSYNC.toString())) { return false; } String uploadMethods = settingsService.getValueForKey(SettingsServiceBean.Key.UploadMethods); - if (uploadMethods==null){ + if (uploadMethods == null) { return false; } else { - return Arrays.asList(uploadMethods.toLowerCase().split("\\s*,\\s*")).size() == 1 && uploadMethods.toLowerCase().equals(SystemConfig.FileUploadMethods.RSYNC.toString()); + return Arrays.asList(uploadMethods.toLowerCase().split("\\s*,\\s*")).size() == 1 && uploadMethods.toLowerCase().equals(SystemConfig.FileUploadMethods.RSYNC.toString()); } } - + public boolean isRsyncDownload() { return getMethodAvailable(SystemConfig.FileUploadMethods.RSYNC.toString(), false); } - + public boolean isHTTPDownload() { return getMethodAvailable(SystemConfig.FileUploadMethods.NATIVE.toString(), false); } @@ -1032,14 +1033,14 @@ public boolean isHTTPDownload() { public boolean isGlobusDownload() { return getMethodAvailable(FileUploadMethods.GLOBUS.toString(), false); } - + public boolean isGlobusFileDownload() { return (isGlobusDownload() && settingsService.isTrueForKey(SettingsServiceBean.Key.GlobusSingleFileTransfer, false)); } public List getGlobusStoresList() { - String globusStores = settingsService.getValueForKey(SettingsServiceBean.Key.GlobusStores, ""); - return Arrays.asList(globusStores.split("\\s*,\\s*")); + String globusStores = settingsService.getValueForKey(SettingsServiceBean.Key.GlobusStores, ""); + return Arrays.asList(globusStores.split("\\s*,\\s*")); } private Boolean getMethodAvailable(String method, boolean upload) { @@ -1051,31 +1052,32 @@ private Boolean getMethodAvailable(String method, boolean upload) { return Arrays.asList(methods.toLowerCase().split("\\s*,\\s*")).contains(method); } } - - public Integer getUploadMethodCount(){ - String uploadMethods = settingsService.getValueForKey(SettingsServiceBean.Key.UploadMethods); - if (uploadMethods==null){ + + public Integer getUploadMethodCount() { + String uploadMethods = settingsService.getValueForKey(SettingsServiceBean.Key.UploadMethods); + if (uploadMethods == null) { return 0; } else { - return Arrays.asList(uploadMethods.toLowerCase().split("\\s*,\\s*")).size(); - } + return Arrays.asList(uploadMethods.toLowerCase().split("\\s*,\\s*")).size(); + } } - public boolean isDataFilePIDSequentialDependent(){ + + public boolean isDataFilePIDSequentialDependent() { String doiIdentifierType = settingsService.getValueForKey(SettingsServiceBean.Key.IdentifierGenerationStyle, "randomString"); String doiDataFileFormat = settingsService.getValueForKey(SettingsServiceBean.Key.DataFilePIDFormat, "DEPENDENT"); - if (doiIdentifierType.equals("storedProcGenerated") && doiDataFileFormat.equals("DEPENDENT")){ + if (doiIdentifierType.equals("storedProcGenerated") && doiDataFileFormat.equals("DEPENDENT")) { return true; } return false; } - + public int getPIDAsynchRegFileCount() { String fileCount = settingsService.getValueForKey(SettingsServiceBean.Key.PIDAsynchRegFileCount, "10"); 
int retVal = 10; try { retVal = Integer.parseInt(fileCount); - } catch (NumberFormatException e) { - //if no number in the setting we'll return 10 + } catch (NumberFormatException e) { + // if no number in the setting we'll return 10 } return retVal; } @@ -1089,13 +1091,13 @@ public boolean isFilePIDsEnabled() { boolean safeDefaultIfKeyNotFound = true; return settingsService.isTrueForKey(SettingsServiceBean.Key.FilePIDsEnabled, safeDefaultIfKeyNotFound); } - + public boolean isIndependentHandleService() { boolean safeDefaultIfKeyNotFound = false; return settingsService.isTrueForKey(SettingsServiceBean.Key.IndependentHandleService, safeDefaultIfKeyNotFound); - + } - + public String getHandleAuthHandle() { String handleAuthHandle = settingsService.getValueForKey(SettingsServiceBean.Key.HandleAuthHandle, null); return handleAuthHandle; @@ -1105,61 +1107,61 @@ public String getMDCLogPath() { String mDCLogPath = settingsService.getValueForKey(SettingsServiceBean.Key.MDCLogPath, null); return mDCLogPath; } - + public boolean isDatafileValidationOnPublishEnabled() { boolean safeDefaultIfKeyNotFound = true; return settingsService.isTrueForKey(SettingsServiceBean.Key.FileValidationOnPublishEnabled, safeDefaultIfKeyNotFound); } - public boolean directUploadEnabled(DvObjectContainer container) { + public boolean directUploadEnabled(DvObjectContainer container) { // this method is used in UI only, therfore "dataverse.files." + driverId + ".allow-out-of-band-upload" is not used here return Boolean.parseBoolean(System.getProperty("dataverse.files." + container.getEffectiveStorageDriverId() + ".upload-redirect")); - } - - public String getDataCiteRestApiUrlString() { - //As of 5.0 the 'doi.dataciterestapiurlstring' is the documented jvm option. Prior versions used 'doi.mdcbaseurlstring' or were hardcoded to api.datacite.org, so the defaults are for backward compatibility. + } + + public String getDataCiteRestApiUrlString() { + // As of 5.0 the 'doi.dataciterestapiurlstring' is the documented jvm option. Prior versions used 'doi.mdcbaseurlstring' or were hardcoded to api.datacite.org, so the defaults are for backward compatibility. return System.getProperty("doi.dataciterestapiurlstring", System.getProperty("doi.mdcbaseurlstring", "https://api.datacite.org")); - } - + } + public boolean isExternalDataverseValidationEnabled() { return settingsService.getValueForKey(SettingsServiceBean.Key.DataverseMetadataValidatorScript) != null; - // alternatively, we can also check if the script specified exists, + // alternatively, we can also check if the script specified exists, // and is executable. -- ? } - + public boolean isExternalDatasetValidationEnabled() { return settingsService.getValueForKey(SettingsServiceBean.Key.DatasetMetadataValidatorScript) != null; - // alternatively, we can also check if the script specified exists, + // alternatively, we can also check if the script specified exists, // and is executable. -- ? 
} - + public String getDataverseValidationExecutable() { return settingsService.getValueForKey(SettingsServiceBean.Key.DataverseMetadataValidatorScript); } - + public String getDatasetValidationExecutable() { return settingsService.getValueForKey(SettingsServiceBean.Key.DatasetMetadataValidatorScript); } - + public String getDataverseValidationFailureMsg() { String defaultMessage = "This dataverse collection cannot be published because it has failed an external metadata validation test."; return settingsService.getValueForKey(SettingsServiceBean.Key.DataverseMetadataPublishValidationFailureMsg, defaultMessage); } - + public String getDataverseUpdateValidationFailureMsg() { String defaultMessage = "This dataverse collection cannot be updated because it has failed an external metadata validation test."; return settingsService.getValueForKey(SettingsServiceBean.Key.DataverseMetadataUpdateValidationFailureMsg, defaultMessage); } - + public String getDatasetValidationFailureMsg() { String defaultMessage = "This dataset cannot be published because it has failed an external metadata validation test."; return settingsService.getValueForKey(SettingsServiceBean.Key.DatasetMetadataValidationFailureMsg, defaultMessage); } - + public boolean isExternalValidationAdminOverrideEnabled() { return "true".equalsIgnoreCase(settingsService.getValueForKey(SettingsServiceBean.Key.ExternalValidationAdminOverride)); } - + public long getDatasetValidationSizeLimit() { String limitEntry = settingsService.getValueForKey(SettingsServiceBean.Key.DatasetChecksumValidationSizeLimit); @@ -1189,6 +1191,7 @@ public long getFileValidationSizeLimit() { // -1 means no limit is set; return -1; } + public Map getCurationLabels() { Map labelMap = new HashMap(); String setting = settingsService.getValueForKey(SettingsServiceBean.Key.AllowedCurationLabels, ""); @@ -1229,15 +1232,15 @@ public Map getCurationLabels() { } return labelMap; } - + public boolean isSignupDisabledForRemoteAuthProvider(String providerId) { - Boolean ret = settingsService.getValueForCompoundKeyAsBoolean(SettingsServiceBean.Key.AllowRemoteAuthSignUp, providerId); - - // we default to false - i.e., "not disabled" if the setting is not present: + Boolean ret = settingsService.getValueForCompoundKeyAsBoolean(SettingsServiceBean.Key.AllowRemoteAuthSignUp, providerId); + + // we default to false - i.e., "not disabled" if the setting is not present: if (ret == null) { - return false; + return false; } - - return !ret; + + return !ret; } } From 8578de173b63dbde3bb5440147422783621fbee9 Mon Sep 17 00:00:00 2001 From: Eryk Kulikowski Date: Thu, 17 Nov 2022 10:10:57 +0100 Subject: [PATCH 0011/1525] tab character removed --- src/main/java/edu/harvard/iq/dataverse/util/SystemConfig.java | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/util/SystemConfig.java b/src/main/java/edu/harvard/iq/dataverse/util/SystemConfig.java index f3d8e46b004..1edf5a0fb6e 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/SystemConfig.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/SystemConfig.java @@ -1105,7 +1105,7 @@ public String getHandleAuthHandle() { public String getMDCLogPath() { String mDCLogPath = settingsService.getValueForKey(SettingsServiceBean.Key.MDCLogPath, null); - return mDCLogPath; + return mDCLogPath;this method is used } public boolean isDatafileValidationOnPublishEnabled() { @@ -1115,7 +1115,7 @@ public boolean isDatafileValidationOnPublishEnabled() { public boolean 
directUploadEnabled(DvObjectContainer container) { // this method is used in UI only, therfore "dataverse.files." + driverId + ".allow-out-of-band-upload" is not used here - return Boolean.parseBoolean(System.getProperty("dataverse.files." + container.getEffectiveStorageDriverId() + ".upload-redirect")); + return Boolean.parseBoolean(System.getProperty("dataverse.files." + container.getEffectiveStorageDriverId() + ".upload-redirect")); } public String getDataCiteRestApiUrlString() { From f2e75db13bcff1f5a5bc7d5cfc958db04be745c0 Mon Sep 17 00:00:00 2001 From: Eryk Kulikowski Date: Thu, 17 Nov 2022 10:15:03 +0100 Subject: [PATCH 0012/1525] tab character removed --- src/main/java/edu/harvard/iq/dataverse/util/SystemConfig.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/util/SystemConfig.java b/src/main/java/edu/harvard/iq/dataverse/util/SystemConfig.java index 1edf5a0fb6e..0ab99c0de6a 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/SystemConfig.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/SystemConfig.java @@ -1105,7 +1105,7 @@ public String getHandleAuthHandle() { public String getMDCLogPath() { String mDCLogPath = settingsService.getValueForKey(SettingsServiceBean.Key.MDCLogPath, null); - return mDCLogPath;this method is used + return mDCLogPath; } public boolean isDatafileValidationOnPublishEnabled() { From bff889d3864ca10f7dc4f7ae84595e40a2b70d34 Mon Sep 17 00:00:00 2001 From: Eryk Kulikowski Date: Thu, 17 Nov 2022 10:25:39 +0100 Subject: [PATCH 0013/1525] tab character removed --- src/main/java/edu/harvard/iq/dataverse/util/SystemConfig.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/util/SystemConfig.java b/src/main/java/edu/harvard/iq/dataverse/util/SystemConfig.java index 0ab99c0de6a..7d7006e708e 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/SystemConfig.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/SystemConfig.java @@ -1119,7 +1119,7 @@ public boolean directUploadEnabled(DvObjectContainer container) { } public String getDataCiteRestApiUrlString() { - // As of 5.0 the 'doi.dataciterestapiurlstring' is the documented jvm option. Prior versions used 'doi.mdcbaseurlstring' or were hardcoded to api.datacite.org, so the defaults are for backward compatibility. + // As of 5.0 the 'doi.dataciterestapiurlstring' is the documented jvm option. Prior versions used 'doi.mdcbaseurlstring' or were hardcoded to api.datacite.org, so the defaults are for backward compatibility. 
return System.getProperty("doi.dataciterestapiurlstring", System.getProperty("doi.mdcbaseurlstring", "https://api.datacite.org")); } From ad4bb5107fcb14b8c4ebb7f7fd57186511577548 Mon Sep 17 00:00:00 2001 From: Eryk Kulikowski Date: Thu, 17 Nov 2022 11:51:31 +0100 Subject: [PATCH 0014/1525] renamed jvm option: allow-out-of-band-upload -> upload-out-of-band --- doc/sphinx-guides/source/installation/config.rst | 9 +++++---- .../edu/harvard/iq/dataverse/dataaccess/StorageIO.java | 2 +- .../java/edu/harvard/iq/dataverse/util/FileUtil.java | 2 +- .../java/edu/harvard/iq/dataverse/util/SystemConfig.java | 2 +- 4 files changed, 8 insertions(+), 7 deletions(-) diff --git a/doc/sphinx-guides/source/installation/config.rst b/doc/sphinx-guides/source/installation/config.rst index 4f15ad81190..62cc984bc56 100644 --- a/doc/sphinx-guides/source/installation/config.rst +++ b/doc/sphinx-guides/source/installation/config.rst @@ -249,11 +249,11 @@ A Dataverse installation can alternately store files in a Swift or S3-compatible A Dataverse installation may also be configured to reference some files (e.g. large and/or sensitive data) stored in a web-accessible trusted remote store. -When using integration tools, dataverse installation can be configured to allow out of band upload by setting the ``dataverse.files.\.allow-out-of-band-upload`` JVM option to ``true``. +When using integration tools, dataverse installation can be configured to allow out of band upload by setting the ``dataverse.files.\.upload-out-of-band`` JVM option to ``true``. Files can be then uploaded by an integration tool with ``datasets/{id}/add`` api call, or uploaded directly to the storage and registerd in a dataset afterwards using the ``datasets/{id}/addFiles`` api call. -Notice that using S3-storage with ``dataverse.files.\.upload-redirect`` JVM option enabled supersedes the ``allow-out-of-band-upload`` and will enable direct upload even with ``allow-out-of-band-upload`` not set (or set to false). -In other words, ``dataverse.files.\.allow-out-of-band-upload`` option opens the ``datasets/{id}/add`` and ``datasets/{id}/addFiles`` api endpoints without redirecting uploads in the UI. -Enabling the ``upload-redirect`` option allows then direct upload automatically, without the need of enabling the ``allow-out-of-band-upload`` (setting it to ``false`` does not have any effect in that case). +Notice that using S3-storage with ``dataverse.files.\.upload-redirect`` JVM option enabled supersedes the ``upload-out-of-band`` and will enable direct upload even with ``upload-out-of-band`` not set (or set to false). +In other words, ``dataverse.files.\.upload-out-of-band`` option opens the ``datasets/{id}/add`` and ``datasets/{id}/addFiles`` api endpoints without redirecting uploads in the UI. +Enabling the ``upload-redirect`` option allows then direct upload automatically, without the need of enabling the ``upload-out-of-band`` (setting it to ``false`` does not have any effect in that case). The following sections describe how to set up various types of stores and how to configure for multiple stores. @@ -552,6 +552,7 @@ List of S3 Storage Options dataverse.files..bucket-name The bucket name. See above. (none) dataverse.files..download-redirect ``true``/``false`` Enable direct download or proxy through Dataverse. ``false`` dataverse.files..upload-redirect ``true``/``false`` Enable direct upload of files added to a dataset in the S3 store. 
``false`` + dataverse.files..upload-out-of-band ``true``/``false`` Enable direct upload of files added to a dataset via API only. ``false`` dataverse.files..ingestsizelimit Maximum size of directupload files that should be ingested (none) dataverse.files..url-expiration-minutes If direct uploads/downloads: time until links expire. Optional. 60 dataverse.files..min-part-size Multipart direct uploads will occur for files larger than this. Optional. ``1024**3`` diff --git a/src/main/java/edu/harvard/iq/dataverse/dataaccess/StorageIO.java b/src/main/java/edu/harvard/iq/dataverse/dataaccess/StorageIO.java index a2ff546ef0a..85ca97d5f15 100644 --- a/src/main/java/edu/harvard/iq/dataverse/dataaccess/StorageIO.java +++ b/src/main/java/edu/harvard/iq/dataverse/dataaccess/StorageIO.java @@ -606,7 +606,7 @@ public static String getDriverPrefix(String driverId) { public static boolean isDirectUploadEnabled(String driverId) { return Boolean.parseBoolean(System.getProperty("dataverse.files." + driverId + ".upload-redirect")) || - Boolean.parseBoolean(System.getProperty("dataverse.files." + driverId + ".allow-out-of-band-upload")); + Boolean.parseBoolean(System.getProperty("dataverse.files." + driverId + ".upload-out-of-band")); } //Check that storageIdentifier is consistent with store's config diff --git a/src/main/java/edu/harvard/iq/dataverse/util/FileUtil.java b/src/main/java/edu/harvard/iq/dataverse/util/FileUtil.java index db82df72b8a..9b549901d55 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/FileUtil.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/FileUtil.java @@ -1749,7 +1749,7 @@ public static boolean isPackageFile(DataFile dataFile) { public static S3AccessIO getS3AccessForDirectUpload(Dataset dataset) { String driverId = dataset.getEffectiveStorageDriverId(); boolean directEnabled = Boolean.parseBoolean(System.getProperty("dataverse.files." + driverId + ".upload-redirect")) || - Boolean.parseBoolean(System.getProperty("dataverse.files." + driverId + ".allow-out-of-band-upload")); + Boolean.parseBoolean(System.getProperty("dataverse.files." + driverId + ".aupload-out-of-band")); //Should only be requested when it is allowed, but we'll log a warning otherwise if(!directEnabled) { logger.warning("Direct upload not supported for files in this dataset: " + dataset.getId()); diff --git a/src/main/java/edu/harvard/iq/dataverse/util/SystemConfig.java b/src/main/java/edu/harvard/iq/dataverse/util/SystemConfig.java index 7d7006e708e..b45ad50ab1d 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/SystemConfig.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/SystemConfig.java @@ -1114,7 +1114,7 @@ public boolean isDatafileValidationOnPublishEnabled() { } public boolean directUploadEnabled(DvObjectContainer container) { - // this method is used in UI only, therfore "dataverse.files." + driverId + ".allow-out-of-band-upload" is not used here + // this method is used in UI only, therfore "dataverse.files." + driverId + ".upload-out-of-band" is not used here return Boolean.parseBoolean(System.getProperty("dataverse.files." 
+ container.getEffectiveStorageDriverId() + ".upload-redirect")); } From 49102ada3380863d115f5167343eb97446b35872 Mon Sep 17 00:00:00 2001 From: Eryk Kulikowski Date: Thu, 17 Nov 2022 12:49:36 +0100 Subject: [PATCH 0015/1525] linking to api documentation --- doc/sphinx-guides/source/api/native-api.rst | 1 + doc/sphinx-guides/source/installation/config.rst | 5 ++--- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/doc/sphinx-guides/source/api/native-api.rst b/doc/sphinx-guides/source/api/native-api.rst index 6d68d648cb3..0341b6e07d1 100644 --- a/doc/sphinx-guides/source/api/native-api.rst +++ b/doc/sphinx-guides/source/api/native-api.rst @@ -2348,6 +2348,7 @@ The fully expanded example above (without environment variables) looks like this Note: The ``id`` returned in the json response is the id of the file metadata version. +.. _add-file-metadata-api: Adding File Metadata ~~~~~~~~~~~~~~~~~~~~ diff --git a/doc/sphinx-guides/source/installation/config.rst b/doc/sphinx-guides/source/installation/config.rst index 62cc984bc56..b074a180c8f 100644 --- a/doc/sphinx-guides/source/installation/config.rst +++ b/doc/sphinx-guides/source/installation/config.rst @@ -250,10 +250,9 @@ A Dataverse installation can alternately store files in a Swift or S3-compatible A Dataverse installation may also be configured to reference some files (e.g. large and/or sensitive data) stored in a web-accessible trusted remote store. When using integration tools, dataverse installation can be configured to allow out of band upload by setting the ``dataverse.files.\.upload-out-of-band`` JVM option to ``true``. -Files can be then uploaded by an integration tool with ``datasets/{id}/add`` api call, or uploaded directly to the storage and registerd in a dataset afterwards using the ``datasets/{id}/addFiles`` api call. +Files can be then uploaded by an integration tool with :ref:`add-file-api` api call, or uploaded directly to the storage and registerd in a dataset afterwards using the :ref:`add-file-metadata-api` api call. Notice that using S3-storage with ``dataverse.files.\.upload-redirect`` JVM option enabled supersedes the ``upload-out-of-band`` and will enable direct upload even with ``upload-out-of-band`` not set (or set to false). -In other words, ``dataverse.files.\.upload-out-of-band`` option opens the ``datasets/{id}/add`` and ``datasets/{id}/addFiles`` api endpoints without redirecting uploads in the UI. -Enabling the ``upload-redirect`` option allows then direct upload automatically, without the need of enabling the ``upload-out-of-band`` (setting it to ``false`` does not have any effect in that case). +In other words, ``dataverse.files.\.upload-out-of-band`` option opens the :ref:`add-file-api` and :ref:`add-file-metadata-api` api endpoints without redirecting uploads in the UI. The following sections describe how to set up various types of stores and how to configure for multiple stores. 
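To see how the renamed option and ``upload-redirect`` combine at runtime, here is a minimal, self-contained sketch of the lookup performed by StorageIO.isDirectUploadEnabled() above; the store id "demo" and the main() harness are illustrative assumptions, not part of the patch:

    public class DirectUploadCheckSketch {

        // Mirrors StorageIO.isDirectUploadEnabled() after the rename: either JVM
        // option switches direct (out-of-band) upload on for the given store.
        static boolean isDirectUploadEnabled(String driverId) {
            return Boolean.parseBoolean(System.getProperty("dataverse.files." + driverId + ".upload-redirect"))
                    || Boolean.parseBoolean(System.getProperty("dataverse.files." + driverId + ".upload-out-of-band"));
        }

        public static void main(String[] args) {
            System.out.println(isDirectUploadEnabled("demo")); // false: neither option is set
            System.setProperty("dataverse.files.demo.upload-out-of-band", "true");
            System.out.println(isDirectUploadEnabled("demo")); // true: the add-file API endpoints open up
        }
    }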
From e9d6df0bb6f23f4f4a8e7fe53213c91596980332 Mon Sep 17 00:00:00 2001 From: Eryk Kulikowski Date: Thu, 17 Nov 2022 14:11:19 +0100 Subject: [PATCH 0016/1525] some improvements in the documentation --- doc/sphinx-guides/source/api/native-api.rst | 4 ++++ doc/sphinx-guides/source/installation/config.rst | 8 ++++---- 2 files changed, 8 insertions(+), 4 deletions(-) diff --git a/doc/sphinx-guides/source/api/native-api.rst b/doc/sphinx-guides/source/api/native-api.rst index 0341b6e07d1..f075acf40f6 100644 --- a/doc/sphinx-guides/source/api/native-api.rst +++ b/doc/sphinx-guides/source/api/native-api.rst @@ -1481,6 +1481,8 @@ In practice, you only need one the ``dataset_id`` or the ``persistentId``. The e print r.json() print r.status_code +This API call might result in an error: "Dataset store configuration does not allow provided storageIdentifier". One of the possible causes for this error is that out of band upload is not allowed for the storage that the provided identifier refers to. It can be resolved by enabling the ``dataverse.files.\<id\>.upload-out-of-band`` JVM option. See :ref:`file-storage`. + .. _add-remote-file-api: Add a Remote File to a Dataset @@ -2391,6 +2393,8 @@ The fully expanded example above (without environment variables) looks like this curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X POST https://demo.dataverse.org/api/datasets/:persistentId/addFiles?persistentId=doi:10.5072/FK2/7U7YBV -F jsonData='[{"description":"My description.","directoryLabel":"data/subdir1","categories":["Data"], "restrict":"false", "storageIdentifier":"s3://demo-dataverse-bucket:176e28068b0-1c3f80357c42", "fileName":"file1.txt", "mimeType":"text/plain", "checksum": {"@type": "SHA-1", "@value": "123456"}}, {"description":"My description.","directoryLabel":"data/subdir1","categories":["Data"], "restrict":"false", "storageIdentifier":"s3://demo-dataverse-bucket:176e28068b0-1c3f80357d53", "fileName":"file2.txt", "mimeType":"text/plain", "checksum": {"@type": "SHA-1", "@value": "123789"}}]' +This API call might result in an error: "Dataset store configuration does not allow provided storageIdentifier". One of the possible causes for this error is that out of band upload is not allowed for the storage that the provided identifier refers to. It can be resolved by enabling the ``dataverse.files.\<id\>.upload-out-of-band`` JVM option. See :ref:`file-storage`. + Updating File Metadata ~~~~~~~~~~~~~~~~~~~~~~ diff --git a/doc/sphinx-guides/source/installation/config.rst b/doc/sphinx-guides/source/installation/config.rst index b074a180c8f..d3a22453453 100644 --- a/doc/sphinx-guides/source/installation/config.rst +++ b/doc/sphinx-guides/source/installation/config.rst @@ -249,10 +249,10 @@ A Dataverse installation can alternately store files in a Swift or S3-compatible A Dataverse installation may also be configured to reference some files (e.g. large and/or sensitive data) stored in a web-accessible trusted remote store. -When using integration tools, dataverse installation can be configured to allow out of band upload by setting the ``dataverse.files.\<id\>.upload-out-of-band`` JVM option to ``true``. -Files can be then uploaded by an integration tool with :ref:`add-file-api` api call, or uploaded directly to the storage and registerd in a dataset afterwards using the :ref:`add-file-metadata-api` api call.
-Notice that using S3-storage with ``dataverse.files.\<id\>.upload-redirect`` JVM option enabled supersedes the ``upload-out-of-band`` and will enable direct upload even with ``upload-out-of-band`` not set (or set to false). -In other words, ``dataverse.files.\<id\>.upload-out-of-band`` option opens the :ref:`add-file-api` and :ref:`add-file-metadata-api` api endpoints without redirecting uploads in the UI. +A Dataverse installation can be configured to allow out of band upload by setting the ``dataverse.files.\<id\>.upload-out-of-band`` JVM option to ``true``. +This option allows adding files with the :ref:`add-file-api` call. It also allows registering the metadata of a file with the :ref:`add-file-metadata-api` call for a file uploaded directly to the storage. +Notice that using S3-storage with ``dataverse.files.\<id\>.upload-redirect`` JVM option enabled supersedes the ``upload-out-of-band`` option and will enable direct upload even with ``upload-out-of-band`` option not set (or set to false). +When neither of the two option is enabled, adding files with API will not be possible and will result with the "Dataset store configuration does not allow provided storageIdentifier" error. The following sections describe how to set up various types of stores and how to configure for multiple stores. From dc64aa23c3d4c364f46ad6e695e38ed3311455eb Mon Sep 17 00:00:00 2001 From: Eryk Kulikowski Date: Thu, 17 Nov 2022 17:47:13 +0100 Subject: [PATCH 0017/1525] documentation improvements by Dieuwertje --- doc/sphinx-guides/source/api/native-api.rst | 4 ++-- doc/sphinx-guides/source/installation/config.rst | 6 +++--- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/doc/sphinx-guides/source/api/native-api.rst b/doc/sphinx-guides/source/api/native-api.rst index f075acf40f6..54e47a29b9d 100644 --- a/doc/sphinx-guides/source/api/native-api.rst +++ b/doc/sphinx-guides/source/api/native-api.rst @@ -1481,7 +1481,7 @@ In practice, you only need one the ``dataset_id`` or the ``persistentId``. The e print r.json() print r.status_code -This API call might result in an error: "Dataset store configuration does not allow provided storageIdentifier". One of the possible causes for this error is that out of band upload is not allowed for the storage that the provided identifier refers to. It can be resolved by enabling the ``dataverse.files.\<id\>.upload-out-of-band`` JVM option. See :ref:`file-storage`. +This API call might result in the following error: "Dataset store configuration does not allow provided storageIdentifier". One of the possible causes for this error is that out of band upload is not allowed for the storage that the provided identifier refers to. This error can be resolved by enabling the ``dataverse.files.\<id\>.upload-out-of-band`` JVM option. See :ref:`file-storage`. ..
_add-remote-file-api: @@ -2393,7 +2393,7 @@ The fully expanded example above (without environment variables) looks like this curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X POST https://demo.dataverse.org/api/datasets/:persistentId/addFiles?persistentId=doi:10.5072/FK2/7U7YBV -F jsonData='[{"description":"My description.","directoryLabel":"data/subdir1","categories":["Data"], "restrict":"false", "storageIdentifier":"s3://demo-dataverse-bucket:176e28068b0-1c3f80357c42", "fileName":"file1.txt", "mimeType":"text/plain", "checksum": {"@type": "SHA-1", "@value": "123456"}}, {"description":"My description.","directoryLabel":"data/subdir1","categories":["Data"], "restrict":"false", "storageIdentifier":"s3://demo-dataverse-bucket:176e28068b0-1c3f80357d53", "fileName":"file2.txt", "mimeType":"text/plain", "checksum": {"@type": "SHA-1", "@value": "123789"}}]' -This API call might result in an error: "Dataset store configuration does not allow provided storageIdentifier". One of the possible causes for this error is that out of band upload is not allowed for the storage that the provided identifier refers to. It can be resolved by enabling the ``dataverse.files.\<id\>.upload-out-of-band`` JVM option. See :ref:`file-storage`. +This API call might result in the following error: "Dataset store configuration does not allow provided storageIdentifier". One of the possible causes for this error is that out of band upload is not allowed for the storage that the provided identifier refers to. This error can be resolved by enabling the ``dataverse.files.\<id\>.upload-out-of-band`` JVM option. See :ref:`file-storage`. Updating File Metadata ~~~~~~~~~~~~~~~~~~~~~~ diff --git a/doc/sphinx-guides/source/installation/config.rst b/doc/sphinx-guides/source/installation/config.rst index d3a22453453..4eadcc8ed9d 100644 --- a/doc/sphinx-guides/source/installation/config.rst +++ b/doc/sphinx-guides/source/installation/config.rst @@ -250,9 +250,9 @@ A Dataverse installation can alternately store files in a Swift or S3-compatible A Dataverse installation may also be configured to reference some files (e.g. large and/or sensitive data) stored in a web-accessible trusted remote store. A Dataverse installation can be configured to allow out of band upload by setting the ``dataverse.files.\<id\>.upload-out-of-band`` JVM option to ``true``. -This option allows adding files with the :ref:`add-file-api` call. It also allows registering the metadata of a file with the :ref:`add-file-metadata-api` call for a file uploaded directly to the storage. -Notice that using S3-storage with ``dataverse.files.\<id\>.upload-redirect`` JVM option enabled supersedes the ``upload-out-of-band`` option and will enable direct upload even with ``upload-out-of-band`` option not set (or set to false). -When neither of the two option is enabled, adding files with API will not be possible and will result with the "Dataset store configuration does not allow provided storageIdentifier" error. +This option allows API users to add files with the :ref:`add-file-api` call. It also allows API users to register the metadata of a file with the :ref:`add-file-metadata-api` call for a file that was uploaded directly to the storage. +Note that if a Dataverse installation uses S3-storage while the ``dataverse.files.\<id\>.upload-redirect`` JVM option is enabled, the ``upload-out-of-band`` setting is overruled. This results in direct upload being enabled even with the ``upload-out-of-band`` option not set (or set to false).
+When the ``upload-out-of-band`` option is not set to ``true`` and it isn't being overruled by the previously mentioned combination, adding files using the API will not be possible and will return the "Dataset store configuration does not allow provided storageIdentifier" error. From 085fb8f44503d69354b5cb8f5793d8144dbde0e1 Mon Sep 17 00:00:00 2001 From: Eryk Kulikowski Date: Mon, 21 Nov 2022 09:53:01 +0100 Subject: [PATCH 0018/1525] improvements in the documentation --- doc/sphinx-guides/source/installation/config.rst | 3 +++ 1 file changed, 3 insertions(+) diff --git a/doc/sphinx-guides/source/installation/config.rst b/doc/sphinx-guides/source/installation/config.rst index 4eadcc8ed9d..467872bfdd4 100644 --- a/doc/sphinx-guides/source/installation/config.rst +++ b/doc/sphinx-guides/source/installation/config.rst @@ -251,6 +251,9 @@ A Dataverse installation may also be configured to reference some files (e.g. la A Dataverse installation can be configured to allow out of band upload by setting the ``dataverse.files.\<id\>.upload-out-of-band`` JVM option to ``true``. This option allows API users to add files with the :ref:`add-file-api` call. It also allows API users to register the metadata of a file with the :ref:`add-file-metadata-api` call for a file that was uploaded directly to the storage. + +The option is useful in cases in which an S3 storage is not used or made public, as required by the ``dataverse.files.\<id\>.upload-redirect`` option. An example would be building a tool for synchronizing datasets with files from a third-party repository. In such a case, the tool would upload files directly to the storage, and then use :ref:`add-file-metadata-api` to link them to a dataset. + Note that if a Dataverse installation uses S3-storage while the ``dataverse.files.\<id\>.upload-redirect`` JVM option is enabled, the ``upload-out-of-band`` setting is overruled. This results in direct upload being enabled even with the ``upload-out-of-band`` option not set (or set to false). When the ``upload-out-of-band`` option is not set to ``true`` and it isn't being overruled by the previously mentioned combination, adding files using the API will not be possible and will return the "Dataset store configuration does not allow provided storageIdentifier" error. From d870e202dccac268cc3f099277559d3e473b7944 Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Mon, 9 Jan 2023 13:18:06 +0100 Subject: [PATCH 0019/1525] chore(deps): upgrade Nimbus OIDC SDK to latest 10.4 release #9268 --- pom.xml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pom.xml b/pom.xml index 8b6f98c5896..a5d52fd7545 100644 --- a/pom.xml +++ b/pom.xml @@ -381,7 +381,7 @@ <groupId>com.nimbusds</groupId> <artifactId>oauth2-oidc-sdk</artifactId> - <version>9.41.1</version> + <version>10.4</version> From 2ee66618ed77d55878300a7baaa4fa4a94ac7162 Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Mon, 9 Jan 2023 15:52:14 +0100 Subject: [PATCH 0020/1525] style(oidc): make class fields final in OIDCAuthProvider These values should not be changed once the provider has been initialized.
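The design this commit moves to can be illustrated in isolation; a sketch of the fail-fast pattern, not Dataverse code: a final field assigned from a validating helper can never be observed half-initialized.

    class FailFastProviderSketch {
        private final String metadata; // final: set exactly once, during construction

        FailFastProviderSketch(String issuer) {
            // If validation throws here, no half-built object ever escapes.
            this.metadata = fetchMetadata(issuer);
        }

        private static String fetchMetadata(String issuer) {
            if (issuer == null || issuer.isEmpty()) {
                throw new IllegalStateException("provider metadata not retrievable");
            }
            return "metadata-for-" + issuer;
        }
    }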
--- .../oauth2/oidc/OIDCAuthProvider.java | 26 ++++++++++--------- 1 file changed, 14 insertions(+), 12 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/oidc/OIDCAuthProvider.java b/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/oidc/OIDCAuthProvider.java index a9c44010950..4b6c575cfaf 100644 --- a/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/oidc/OIDCAuthProvider.java +++ b/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/oidc/OIDCAuthProvider.java @@ -54,15 +54,15 @@ public class OIDCAuthProvider extends AbstractOAuth2AuthenticationProvider { protected String title = "Open ID Connect"; protected List scope = Arrays.asList("openid", "email", "profile"); - Issuer issuer; - ClientAuthentication clientAuth; - OIDCProviderMetadata idpMetadata; + final Issuer issuer; + final ClientAuthentication clientAuth; + final OIDCProviderMetadata idpMetadata; public OIDCAuthProvider(String aClientId, String aClientSecret, String issuerEndpointURL) throws AuthorizationSetupException { this.clientSecret = aClientSecret; // nedded for state creation this.clientAuth = new ClientSecretBasic(new ClientID(aClientId), new Secret(aClientSecret)); this.issuer = new Issuer(issuerEndpointURL); - getMetadata(); + this.idpMetadata = getMetadata(); } /** @@ -74,7 +74,9 @@ public OIDCAuthProvider(String aClientId, String aClientSecret, String issuerEnd * @return false */ @Override - public boolean isDisplayIdentifier() { return false; } + public boolean isDisplayIdentifier() { + return false; + } /** * Setup metadata from OIDC provider during creation of the provider representation @@ -82,9 +84,14 @@ public OIDCAuthProvider(String aClientId, String aClientSecret, String issuerEnd * @throws IOException when sth. goes wrong with the retrieval * @throws ParseException when the metadata is not parsable */ - void getMetadata() throws AuthorizationSetupException { + OIDCProviderMetadata getMetadata() throws AuthorizationSetupException { try { - this.idpMetadata = getMetadata(this.issuer); + var metadata = getMetadata(this.issuer); + // Assert that the provider supports the code flow + if (metadata.getResponseTypes().stream().noneMatch(ResponseType::impliesCodeFlow)) { + throw new AuthorizationSetupException("OIDC provider at "+this.issuer.getValue()+" does not support code flow, disabling."); + } + return metadata; } catch (IOException ex) { logger.severe("OIDC provider metadata at \"+issuerEndpointURL+\" not retrievable: "+ex.getMessage()); throw new AuthorizationSetupException("OIDC provider metadata at "+this.issuer.getValue()+" not retrievable."); @@ -92,11 +99,6 @@ void getMetadata() throws AuthorizationSetupException { logger.severe("OIDC provider metadata at \"+issuerEndpointURL+\" not parsable: "+ex.getMessage()); throw new AuthorizationSetupException("OIDC provider metadata at "+this.issuer.getValue()+" not parsable."); } - - // Assert that the provider supports the code flow - if (! 
this.idpMetadata.getResponseTypes().stream().filter(idp -> idp.impliesCodeFlow()).findAny().isPresent()) { - throw new AuthorizationSetupException("OIDC provider at "+this.issuer.getValue()+" does not support code flow, disabling."); - } } /** From 7f2c1918e628e4b7ed6ffdf4420f8195c5f456b0 Mon Sep 17 00:00:00 2001 From: qqmyers Date: Mon, 9 Jan 2023 14:05:42 -0500 Subject: [PATCH 0021/1525] initial setting --- .../java/edu/harvard/iq/dataverse/settings/JvmSettings.java | 3 +++ 1 file changed, 3 insertions(+) diff --git a/src/main/java/edu/harvard/iq/dataverse/settings/JvmSettings.java b/src/main/java/edu/harvard/iq/dataverse/settings/JvmSettings.java index bc5a73cd958..51429d39899 100644 --- a/src/main/java/edu/harvard/iq/dataverse/settings/JvmSettings.java +++ b/src/main/java/edu/harvard/iq/dataverse/settings/JvmSettings.java @@ -64,6 +64,9 @@ public enum JvmSettings { SCOPE_API(PREFIX, "api"), API_SIGNING_SECRET(SCOPE_API, "signing-secret"), + // METADATA SETTINGS + SCOPE_METADATA(PREFIX, "metadata"), + METADATA_BLOCK_SYSTEM_METADATA_KEYS(SCOPE_METADATA, "block-system-metadata-keys"), ; private static final String SCOPE_SEPARATOR = "."; From 935f50c04e21f86cf101e19b764fb526762bb7b0 Mon Sep 17 00:00:00 2001 From: qqmyers Date: Mon, 9 Jan 2023 14:06:01 -0500 Subject: [PATCH 0022/1525] UI cache for system md block setting --- .../harvard/iq/dataverse/SettingsWrapper.java | 17 +++++++++++++++++ 1 file changed, 17 insertions(+) diff --git a/src/main/java/edu/harvard/iq/dataverse/SettingsWrapper.java b/src/main/java/edu/harvard/iq/dataverse/SettingsWrapper.java index bf36f265743..ec6ad24a9c6 100644 --- a/src/main/java/edu/harvard/iq/dataverse/SettingsWrapper.java +++ b/src/main/java/edu/harvard/iq/dataverse/SettingsWrapper.java @@ -6,6 +6,7 @@ package edu.harvard.iq.dataverse; import edu.harvard.iq.dataverse.branding.BrandingUtil; +import edu.harvard.iq.dataverse.settings.JvmSettings; import edu.harvard.iq.dataverse.settings.Setting; import edu.harvard.iq.dataverse.settings.SettingsServiceBean; import edu.harvard.iq.dataverse.settings.SettingsServiceBean.Key; @@ -35,6 +36,7 @@ import javax.faces.validator.ValidatorException; import javax.faces.view.ViewScoped; import javax.inject.Named; +import javax.json.Json; import javax.json.JsonObject; import javax.mail.internet.InternetAddress; @@ -115,6 +117,8 @@ public class SettingsWrapper implements java.io.Serializable { private Boolean customLicenseAllowed = null; + private JsonObject systemMetadataBlocks; + private Set alwaysMuted = null; private Set neverMuted = null; @@ -700,4 +704,17 @@ public boolean isCustomLicenseAllowed() { } return customLicenseAllowed; } + + public JsonObject getSystemMetadataBlocks() { + if (systemMetadataBlocks == null) { + String smdbString = JvmSettings.METADATA_BLOCK_SYSTEM_METADATA_KEYS.lookup(); + if (smdbString != null) { + systemMetadataBlocks = JsonUtil.getJsonObject(smdbString); + } + if (systemMetadataBlocks == null) { + systemMetadataBlocks = Json.createObjectBuilder().build(); + } + } + return systemMetadataBlocks; + } } \ No newline at end of file From 549d8138fb851990ca41b1f54f0cd1f392b3f05f Mon Sep 17 00:00:00 2001 From: qqmyers Date: Mon, 9 Jan 2023 14:06:19 -0500 Subject: [PATCH 0023/1525] Don't show system md blocks in edit. 
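Patches 0021 through 0023 work together: a JVM setting holds a JSON object whose keys are block names, and the edit view skips any block found in it. A self-contained sketch of that filter follows; the block name "codeMeta" and the setting value are made-up examples, and the loop mirrors the containsKey() check added to DatasetVersionUI in the diff below:

    import java.io.StringReader;
    import java.util.List;
    import javax.json.Json;
    import javax.json.JsonObject;

    public class SystemBlockFilterSketch {
        public static void main(String[] args) {
            // Hypothetical value of dataverse.metadata.block-system-metadata-keys.
            JsonObject systemBlocks = Json.createReader(
                    new StringReader("{\"codeMeta\": \"some-key\"}")).readObject();

            // Blocks named in the setting stay out of metadataBlocksForEdit.
            for (String block : List.of("citation", "codeMeta")) {
                if (!systemBlocks.containsKey(block)) {
                    System.out.println(block + " remains editable"); // prints: citation remains editable
                }
            }
        }
    }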
--- .../java/edu/harvard/iq/dataverse/DatasetVersionUI.java | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetVersionUI.java b/src/main/java/edu/harvard/iq/dataverse/DatasetVersionUI.java index d09457c86bf..bea1a71a82c 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetVersionUI.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetVersionUI.java @@ -23,6 +23,7 @@ import javax.ejb.EJB; import javax.faces.view.ViewScoped; import javax.inject.Named; +import javax.json.JsonObject; import javax.persistence.EntityManager; import javax.persistence.PersistenceContext; @@ -35,6 +36,9 @@ public class DatasetVersionUI implements Serializable { @EJB DataverseServiceBean dataverseService; + @EJB + SettingsWrapper settingsWrapper; + @PersistenceContext(unitName = "VDCNet-ejbPU") private EntityManager em; @@ -400,6 +404,9 @@ public void setMetadataValueBlocks(DatasetVersion datasetVersion) { //TODO: A lot of clean up on the logic of this method metadataBlocksForView.clear(); metadataBlocksForEdit.clear(); + + JsonObject systemMDBlocks = settingsWrapper.getSystemMetadataBlocks(); + Long dvIdForInputLevel = datasetVersion.getDataset().getOwner().getId(); if (!dataverseService.find(dvIdForInputLevel).isMetadataBlockRoot()){ @@ -442,7 +449,7 @@ public void setMetadataValueBlocks(DatasetVersion datasetVersion) { if (!datasetFieldsForView.isEmpty()) { metadataBlocksForView.put(mdb, datasetFieldsForView); } - if (!datasetFieldsForEdit.isEmpty()) { + if (!datasetFieldsForEdit.isEmpty() && !systemMDBlocks.containsKey(mdb.getName())) { metadataBlocksForEdit.put(mdb, datasetFieldsForEdit); } } From 0c7db6614669ecc40e96ffb029be4f21ed04f4db Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Tue, 10 Jan 2023 20:27:55 +0100 Subject: [PATCH 0024/1525] chore(deps): update Testcontainers to latest version Also updating Postgres Server version in "tc" Maven profile. --- modules/dataverse-parent/pom.xml | 2 +- pom.xml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/modules/dataverse-parent/pom.xml b/modules/dataverse-parent/pom.xml index 3911e9d5bbb..e316a5508ce 100644 --- a/modules/dataverse-parent/pom.xml +++ b/modules/dataverse-parent/pom.xml @@ -167,7 +167,7 @@ 5.0.0-RC2 - <testcontainers.version>1.15.0</testcontainers.version> + <testcontainers.version>1.17.6</testcontainers.version> 2.10.1 4.13.1 diff --git a/pom.xml b/pom.xml index a5d52fd7545..56871c7fd56 100644 --- a/pom.xml +++ b/pom.xml @@ -757,7 +757,7 @@ <id>tc</id> true - <postgresql.server.version>9.6</postgresql.server.version> + <postgresql.server.version>13.0</postgresql.server.version> From 5681d24520ac017eb925bc058ecaef877eedd14b Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Tue, 10 Jan 2023 20:28:52 +0100 Subject: [PATCH 0025/1525] chore(deps): add Keycloak Testcontainer module for tests --- pom.xml | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/pom.xml b/pom.xml index 56871c7fd56..63c362ba904 100644 --- a/pom.xml +++ b/pom.xml @@ -570,6 +570,12 @@ <artifactId>postgresql</artifactId> <scope>test</scope> </dependency> + <dependency> + <groupId>com.github.dasniko</groupId> + <artifactId>testcontainers-keycloak</artifactId> + <version>2.4.0</version> + <scope>test</scope> + </dependency> <dependency> <groupId>org.mockito</groupId> <artifactId>mockito-core</artifactId> From 9f534c4e4a59d7b33b9a0e4a5a876819e9278c47 Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Tue, 10 Jan 2023 20:32:12 +0100 Subject: [PATCH 0026/1525] feat(tests): extend JvmSetting helper for test class method references Instead of only allowing to supply static String values for a setting, also allow referencing a static method in the test class to retrieve dynamic data. This is inspired by the JUnit5 MethodSource example.
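The reflective lookup the extension gains in the diff below, reduced to a standalone sketch; the class, method, and property names here are illustrative only:

    import java.lang.reflect.Method;

    public class StaticSupplierSketch {
        // A static, no-arg supplier like the ones @JvmSetting(method = "...") points at.
        private static String supply() {
            return "computed-at-runtime";
        }

        public static void main(String[] args) throws Exception {
            // Same steps as JvmSettingExtension: resolve the method by name on the
            // test class, make it accessible, invoke it statically (null receiver).
            Method m = StaticSupplierSketch.class.getDeclaredMethod("supply");
            m.setAccessible(true);
            System.setProperty("example.setting", (String) m.invoke(null));
            System.out.println(System.getProperty("example.setting")); // computed-at-runtime
        }
    }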
--- .../iq/dataverse/util/testing/JvmSetting.java | 6 ++++- .../util/testing/JvmSettingExtension.java | 25 ++++++++++++++++++- 2 files changed, 29 insertions(+), 2 deletions(-) diff --git a/src/test/java/edu/harvard/iq/dataverse/util/testing/JvmSetting.java b/src/test/java/edu/harvard/iq/dataverse/util/testing/JvmSetting.java index f54cadaf253..85b10489f15 100644 --- a/src/test/java/edu/harvard/iq/dataverse/util/testing/JvmSetting.java +++ b/src/test/java/edu/harvard/iq/dataverse/util/testing/JvmSetting.java @@ -39,6 +39,8 @@ @ResourceLock(value = Resources.SYSTEM_PROPERTIES, mode = ResourceAccessMode.READ_WRITE) public @interface JvmSetting { + static final String PLACEHOLDER = "NULL"; + /** * The key of the system property to be set. */ @@ -47,10 +49,12 @@ /** * The value of the system property to be set. */ - String value(); + String value() default PLACEHOLDER; String[] varArgs() default {}; + String method() default PLACEHOLDER; + /** * Containing annotation of repeatable {@code @SetSystemProperty}. */ diff --git a/src/test/java/edu/harvard/iq/dataverse/util/testing/JvmSettingExtension.java b/src/test/java/edu/harvard/iq/dataverse/util/testing/JvmSettingExtension.java index 56e87589139..17728e75ffc 100644 --- a/src/test/java/edu/harvard/iq/dataverse/util/testing/JvmSettingExtension.java +++ b/src/test/java/edu/harvard/iq/dataverse/util/testing/JvmSettingExtension.java @@ -5,6 +5,11 @@ import org.junit.jupiter.api.extension.BeforeTestExecutionCallback; import org.junit.jupiter.api.extension.ExtensionContext; +import java.lang.reflect.InvocationTargetException; +import java.lang.reflect.Method; + +import static edu.harvard.iq.dataverse.util.testing.JvmSetting.PLACEHOLDER; + public class JvmSettingExtension implements BeforeTestExecutionCallback, AfterTestExecutionCallback { private ExtensionContext.Store getStore(ExtensionContext context) { @@ -28,7 +33,25 @@ public void beforeTestExecution(ExtensionContext extensionContext) throws Except } // set to new value - System.setProperty(settingName, setting.value()); + if (setting.value().equals(PLACEHOLDER) && setting.method().equals(PLACEHOLDER)) { + throw new IllegalArgumentException("You must either provide a value or a method reference " + + "for key JvmSettings." 
+ setting.key()); + } + + // retrieve value from static test class method if no setting given + if (setting.value().equals(PLACEHOLDER)) { + extensionContext.getTestClass().ifPresent(klass -> { + try { + Method valueMethod = klass.getDeclaredMethod(setting.method()); + valueMethod.setAccessible(true); + System.setProperty(settingName, (String)valueMethod.invoke(null)); + } catch (IllegalAccessException | InvocationTargetException | NoSuchMethodException e) { + throw new RuntimeException(e); + } + }); + } else { + System.setProperty(settingName, setting.value()); + } } }); } From 5cd9f2eb8bd01b88cde28e41c8b27c52656c62b9 Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Tue, 10 Jan 2023 20:40:13 +0100 Subject: [PATCH 0027/1525] doc(dev): add description for method references in @JvmSetting helper --- doc/sphinx-guides/source/developers/testing.rst | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/doc/sphinx-guides/source/developers/testing.rst b/doc/sphinx-guides/source/developers/testing.rst index 4b3d5fd0a55..2d1948449a9 100755 --- a/doc/sphinx-guides/source/developers/testing.rst +++ b/doc/sphinx-guides/source/developers/testing.rst @@ -89,8 +89,12 @@ For unit tests, the most interesting part is to set a JVM setting just for the c Please use the ``@JvmSetting(key = JvmSettings.XXX, value = "")`` annotation on a test method or a test class to set and clear the property automatically. -To set arbitrary system properties for the current test, a similar extension -``@SystemProperty(key = "", value = "")`` has been added. +Inspired by JUnit's ``@MethodSource`` annotation, you may use ``@JvmSetting(key = JvmSettings.XXX, method = "zzz")`` +to reference a method located in the same test class by name (i. e. ``private static String zzz() {}``) to allow +retrieving dynamic data instead of String constants only. (Note the requirement for a *static* method!) + +To set arbitrary system properties for the current test, a similar extension ``@SystemProperty(key = "", value = "")`` +has been added. (Note: it does not support method references.) Both extensions will ensure the global state of system properties is non-interfering for test executions. Tests using these extensions will be executed in serial. 
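A usage sketch of the method reference documented above, assuming the OIDC issuer setting introduced in the next commit of this series; the URL and the test names are illustrative:

    import edu.harvard.iq.dataverse.settings.JvmSettings;
    import edu.harvard.iq.dataverse.util.testing.JvmSetting;
    import org.junit.jupiter.api.Test;

    class DynamicJvmSettingExampleTest {
        // Static, no-arg supplier resolved by name when the test executes.
        private static String issuerUrl() {
            return "http://localhost:8090/realms/example";
        }

        @Test
        @JvmSetting(key = JvmSettings.OIDC_AUTH_SERVER_URL, method = "issuerUrl")
        void usesDynamicValue() {
            // While this test runs, the dataverse.auth.oidc.auth-server-url system
            // property holds the value returned by issuerUrl() above.
        }
    }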
From ebd8eede980fa1b3cce3e2f30538c9a79c180eb2 Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Tue, 10 Jan 2023 20:45:04 +0100 Subject: [PATCH 0028/1525] feat(settings): add authentication settings for OIDC to JvmSettings #9268 --- .../harvard/iq/dataverse/settings/JvmSettings.java | 11 +++++++++++ 1 file changed, 11 insertions(+) diff --git a/src/main/java/edu/harvard/iq/dataverse/settings/JvmSettings.java b/src/main/java/edu/harvard/iq/dataverse/settings/JvmSettings.java index bc5a73cd958..46b79b06466 100644 --- a/src/main/java/edu/harvard/iq/dataverse/settings/JvmSettings.java +++ b/src/main/java/edu/harvard/iq/dataverse/settings/JvmSettings.java @@ -64,6 +64,17 @@ public enum JvmSettings { SCOPE_API(PREFIX, "api"), API_SIGNING_SECRET(SCOPE_API, "signing-secret"), + // AUTH SETTINGS + SCOPE_AUTH(PREFIX, "auth"), + // AUTH: OIDC SETTINGS + SCOPE_OIDC(SCOPE_AUTH, "oidc"), + OIDC_ENABLED(SCOPE_OIDC, "enabled"), + OIDC_TITLE(SCOPE_OIDC, "title"), + OIDC_SUBTITLE(SCOPE_OIDC, "subtitle"), + OIDC_AUTH_SERVER_URL(SCOPE_OIDC, "auth-server-url"), + OIDC_CLIENT_ID(SCOPE_OIDC, "client-id"), + OIDC_CLIENT_SECRET(SCOPE_OIDC, "client-secret"), + ; private static final String SCOPE_SEPARATOR = "."; From 1bff1be736a1362dd779be66415919961fb44599 Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Tue, 10 Jan 2023 20:48:18 +0100 Subject: [PATCH 0029/1525] feat(auth): add OIDC provider provisioning via MPCONFIG #9268 Only one provider can be configured via MPCONFIG for now. The provider is configured with an appropriate ID to distinguish it from other providers configured via the API. It can be configured in addition to other OIDC providers when desired. --- ...ationProvidersRegistrationServiceBean.java | 10 ++++++++++ .../OIDCAuthenticationProviderFactory.java | 20 +++++++++++++++++++ 2 files changed, 30 insertions(+) diff --git a/src/main/java/edu/harvard/iq/dataverse/authorization/AuthenticationProvidersRegistrationServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/authorization/AuthenticationProvidersRegistrationServiceBean.java index 6289865baf0..79dabe1d390 100644 --- a/src/main/java/edu/harvard/iq/dataverse/authorization/AuthenticationProvidersRegistrationServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/authorization/AuthenticationProvidersRegistrationServiceBean.java @@ -17,6 +17,7 @@ import edu.harvard.iq.dataverse.authorization.providers.oauth2.OAuth2AuthenticationProviderFactory; import edu.harvard.iq.dataverse.authorization.providers.oauth2.oidc.OIDCAuthenticationProviderFactory; import edu.harvard.iq.dataverse.authorization.providers.shib.ShibAuthenticationProviderFactory; +import edu.harvard.iq.dataverse.settings.JvmSettings; import edu.harvard.iq.dataverse.validation.PasswordValidatorServiceBean; import java.util.HashMap; import java.util.Map; @@ -121,6 +122,15 @@ public void startup() { logger.log(Level.SEVERE, "Exception setting up the authentication provider '" + row.getId() + "': " + ex.getMessage(), ex); } }); + + // Add providers registered via MPCONFIG + if (JvmSettings.OIDC_ENABLED.lookupOptional(Boolean.class).orElse(false)) { + try { + registerProvider(OIDCAuthenticationProviderFactory.buildFromSettings()); + } catch (AuthorizationSetupException e) { + logger.log(Level.SEVERE, "Exception setting up an OIDC auth provider via MicroProfile Config", e); + } + } } private void registerProviderFactory(AuthenticationProviderFactory aFactory) diff --git a/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/oidc/OIDCAuthenticationProviderFactory.java 
b/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/oidc/OIDCAuthenticationProviderFactory.java index c6d1a28e19d..f4d631adea3 100644 --- a/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/oidc/OIDCAuthenticationProviderFactory.java +++ b/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/oidc/OIDCAuthenticationProviderFactory.java @@ -5,6 +5,7 @@ import edu.harvard.iq.dataverse.authorization.providers.AuthenticationProviderFactory; import edu.harvard.iq.dataverse.authorization.providers.AuthenticationProviderRow; import edu.harvard.iq.dataverse.authorization.providers.oauth2.OAuth2AuthenticationProviderFactory; +import edu.harvard.iq.dataverse.settings.JvmSettings; import java.util.Map; @@ -44,4 +45,23 @@ public AuthenticationProvider buildProvider( AuthenticationProviderRow aRow ) th return oidc; } + + /** + * Build an OIDC provider from MicroProfile Config provisioned details + * @return The configured auth provider + * @throws AuthorizationSetupException + */ + public static AuthenticationProvider buildFromSettings() throws AuthorizationSetupException { + OIDCAuthProvider oidc = new OIDCAuthProvider( + JvmSettings.OIDC_CLIENT_ID.lookup(), + JvmSettings.OIDC_CLIENT_SECRET.lookup(), + JvmSettings.OIDC_AUTH_SERVER_URL.lookup() + ); + + oidc.setId("oidc-mpconfig"); + oidc.setTitle(JvmSettings.OIDC_TITLE.lookupOptional().orElse("OpenID Connect")); + oidc.setSubTitle(JvmSettings.OIDC_SUBTITLE.lookupOptional().orElse("OpenID Connect")); + + return oidc; + } } From fb11096562269d3704dd74504b6e665a6a6a843e Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Tue, 10 Jan 2023 20:48:48 +0100 Subject: [PATCH 0030/1525] style(auth): slight reformat of OIDC provider factory #9268 --- .../oauth2/oidc/OIDCAuthenticationProviderFactory.java | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/oidc/OIDCAuthenticationProviderFactory.java b/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/oidc/OIDCAuthenticationProviderFactory.java index f4d631adea3..89cf1cb986d 100644 --- a/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/oidc/OIDCAuthenticationProviderFactory.java +++ b/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/oidc/OIDCAuthenticationProviderFactory.java @@ -38,7 +38,12 @@ public String getInfo() { public AuthenticationProvider buildProvider( AuthenticationProviderRow aRow ) throws AuthorizationSetupException { Map factoryData = OAuth2AuthenticationProviderFactory.parseFactoryData(aRow.getFactoryData()); - OIDCAuthProvider oidc = new OIDCAuthProvider(factoryData.get("clientId"), factoryData.get("clientSecret"), factoryData.get("issuer")); + OIDCAuthProvider oidc = new OIDCAuthProvider( + factoryData.get("clientId"), + factoryData.get("clientSecret"), + factoryData.get("issuer") + ); + oidc.setId(aRow.getId()); oidc.setTitle(aRow.getTitle()); oidc.setSubTitle(aRow.getSubtitle()); From 1fb0f588262a92010c5f0afa52d336a707358a6b Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Tue, 10 Jan 2023 20:51:24 +0100 Subject: [PATCH 0031/1525] test(auth): add integration test for OIDC provisioning via MPCONFIG #9268 Using Testcontainers to start a Keycloak instance with our default development realm, the provider is created using MPCONFIG settings. 
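Outside JUnit, the same container can be exercised in a few lines; a sketch assuming Docker is available locally and that keycloak/oidc-realm.json is on the classpath:

    import dasniko.testcontainers.keycloak.KeycloakContainer;

    public class KeycloakContainerSketch {
        public static void main(String[] args) {
            // Containers are AutoCloseable; try-with-resources stops Keycloak on exit.
            try (KeycloakContainer keycloak =
                         new KeycloakContainer().withRealmImportFile("keycloak/oidc-realm.json")) {
                keycloak.start();
                // Base URL the integration test below builds its issuer from.
                System.out.println(keycloak.getAuthServerUrl());
            }
        }
    }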
--- .../OIDCAuthenticationProviderFactoryIT.java | 37 +++++++++++++++++++ 1 file changed, 37 insertions(+) create mode 100644 src/test/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/oidc/OIDCAuthenticationProviderFactoryIT.java diff --git a/src/test/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/oidc/OIDCAuthenticationProviderFactoryIT.java b/src/test/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/oidc/OIDCAuthenticationProviderFactoryIT.java new file mode 100644 index 00000000000..53cfcca2742 --- /dev/null +++ b/src/test/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/oidc/OIDCAuthenticationProviderFactoryIT.java @@ -0,0 +1,37 @@ +package edu.harvard.iq.dataverse.authorization.providers.oauth2.oidc; + +import dasniko.testcontainers.keycloak.KeycloakContainer; +import edu.harvard.iq.dataverse.settings.JvmSettings; +import edu.harvard.iq.dataverse.util.testing.JvmSetting; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; +import org.testcontainers.junit.jupiter.Container; +import org.testcontainers.junit.jupiter.Testcontainers; + +import static org.junit.jupiter.api.Assertions.assertTrue; + +@Tag("testcontainers") +@Testcontainers +class OIDCAuthenticationProviderFactoryIT { + + static final String clientId = "oidc-client"; + static final String clientSecret = "ss6gE8mODCDfqesQaSG3gwUwZqZt547E"; + static final String realm = "oidc-realm"; + + @Container + static KeycloakContainer keycloakContainer = new KeycloakContainer().withRealmImportFile("keycloak/oidc-realm.json"); + + // simple method to retrieve the issuer URL, referenced to by @JvmSetting annotations + private static String getAuthUrl() { + return keycloakContainer.getAuthServerUrl() + "realms/" + realm; + } + + @Test + @JvmSetting(key = JvmSettings.OIDC_CLIENT_ID, value = clientId) + @JvmSetting(key = JvmSettings.OIDC_CLIENT_SECRET, value = clientSecret) + @JvmSetting(key = JvmSettings.OIDC_AUTH_SERVER_URL, method = "getAuthUrl") + void testCreateProvider() throws Exception { + OIDCAuthProvider oidcAuthProvider = (OIDCAuthProvider) OIDCAuthenticationProviderFactory.buildFromSettings(); + assertTrue(oidcAuthProvider.getMetadata().getTokenEndpointURI().toString().startsWith(keycloakContainer.getAuthServerUrl())); + } +} \ No newline at end of file From e31dba3da3dc267e963c537da4d0076ed11eee44 Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Tue, 10 Jan 2023 20:54:39 +0100 Subject: [PATCH 0032/1525] build(auth): make resources in /conf avail to tests #9268 To use data in /conf for tests, adding the folder in Maven to copy them to the test classpath as resources helps to use them in tests very easily. All dirs under /conf will be copied to the /target/test-classes directory recursively. This also works when running tests in IDEs like IntelliJ. 
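Once the pom change below is applied, anything under /conf loads as an ordinary classpath resource in tests; a minimal check, reusing the realm file referenced by the integration test above:

    import java.net.URL;

    public class ConfResourceSketch {
        public static void main(String[] args) {
            // After `mvn test-compile`, conf/keycloak/oidc-realm.json is copied to
            // target/test-classes and resolves like any other resource.
            URL realm = ConfResourceSketch.class.getClassLoader()
                    .getResource("keycloak/oidc-realm.json");
            System.out.println(realm != null ? realm : "not on classpath");
        }
    }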
---
 pom.xml | 5 +++++
 1 file changed, 5 insertions(+)

diff --git a/pom.xml b/pom.xml
index 63c362ba904..a26071d253b 100644
--- a/pom.xml
+++ b/pom.xml
@@ -596,6 +596,11 @@
+        <testResources>
+            <testResource>
+                <directory>${project.basedir}/conf</directory>
+            </testResource>
+        </testResources>
             <directory>src/main/java</directory>

From bd49930b8936b4edfb15526705bf498a0f38b929 Mon Sep 17 00:00:00 2001
From: qqmyers
Date: Wed, 18 Jan 2023 15:44:58 -0500
Subject: [PATCH 0035/1525] Bug fix in semantic api add method

---
 src/main/java/edu/harvard/iq/dataverse/api/Datasets.java | 4 +++-
 1 file changed, 3 insertions(+), 1 deletion(-)

diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java
index c3d262a20db..73ea0b98cd1 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java
@@ -698,8 +698,10 @@ public Response updateVersionMetadata(String jsonLDBody, @PathParam("id") String
         try {
             Dataset ds = findDatasetOrDie(id);
             DataverseRequest req = createDataverseRequest(findUserOrDie());
-            DatasetVersion dsv = ds.getOrCreateEditVersion();
+            //Check if latest existing version is draft
             boolean updateDraft = ds.getLatestVersion().isDraft();
+            //Then get a draft version - the latest or a new one as needed
+            DatasetVersion dsv = ds.getOrCreateEditVersion();
             dsv = JSONLDUtil.updateDatasetVersionMDFromJsonLD(dsv, jsonLDBody, metadataBlockService, datasetFieldSvc, !replaceTerms, false, licenseSvc);
             dsv.getTermsOfUseAndAccess().setDatasetVersion(dsv);
             boolean hasValidTerms = TermsOfUseAndAccessValidator.isTOUAValid(dsv.getTermsOfUseAndAccess(), null);

From 4cccef3f764f34ed7b6d0492e1e8b05acb03c96c Mon Sep 17 00:00:00 2001
From: qqmyers
Date: Wed, 18 Jan 2023 15:51:37 -0500
Subject: [PATCH 0036/1525] more bugs re: semantic methods

---
 .../edu/harvard/iq/dataverse/api/Datasets.java | 14 ++++++++++----
 1 file changed, 10 insertions(+), 4 deletions(-)

diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java
index 73ea0b98cd1..e3282983b0d 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java
@@ -733,8 +733,11 @@ public Response deleteMetadata(String jsonLDBody, @PathParam("id") String id) {
         try {
             Dataset ds = findDatasetOrDie(id);
             DataverseRequest req = createDataverseRequest(findUserOrDie());
-            DatasetVersion dsv = ds.getOrCreateEditVersion();
+            //Check if latest existing version is draft
             boolean updateDraft = ds.getLatestVersion().isDraft();
+            //Then get a draft version - the latest or a new one as needed
+            DatasetVersion dsv = ds.getOrCreateEditVersion();
+
             dsv = JSONLDUtil.deleteDatasetVersionMDFromJsonLD(dsv, jsonLDBody, metadataBlockService, licenseSvc);
             dsv.getTermsOfUseAndAccess().setDatasetVersion(dsv);
             DatasetVersion managedVersion;
@@ -771,6 +774,9 @@ private Response processDatasetFieldDataDelete(String jsonBody, String id, Datav
             Dataset ds = findDatasetOrDie(id);
             JsonObject json = Json.createReader(rdr).readObject();
 
+            //Check if latest existing version is draft
+            boolean updateDraft = ds.getLatestVersion().isDraft();
+            //Then get a draft version - the latest or a new one as needed
             DatasetVersion dsv = ds.getOrCreateEditVersion();
             dsv.getTermsOfUseAndAccess().setDatasetVersion(dsv);
             List<DatasetField> fields = new LinkedList<>();
@@ -881,8 +887,6 @@ private Response processDatasetFieldDataDelete(String jsonBody, String id, Datav
                 }
             }
 
-
-            boolean updateDraft = ds.getLatestVersion().isDraft();
             DatasetVersion managedVersion = updateDraft
? execCommand(new UpdateDatasetVersionCommand(ds, req)).getOrCreateEditVersion() : execCommand(new CreateDatasetVersionCommand(req, ds, dsv)); @@ -934,6 +938,9 @@ private Response processDatasetUpdate(String jsonBody, String id, DataverseReque Dataset ds = findDatasetOrDie(id); JsonObject json = Json.createReader(rdr).readObject(); + //Check if latest existing version is draft + boolean updateDraft = ds.getLatestVersion().isDraft(); + //Then get a draft version - the latest or a new one as needed DatasetVersion dsv = ds.getOrCreateEditVersion(); dsv.getTermsOfUseAndAccess().setDatasetVersion(dsv); List fields = new LinkedList<>(); @@ -1036,7 +1043,6 @@ private Response processDatasetUpdate(String jsonBody, String id, DataverseReque dsv.getDatasetFields().add(updateField); } } - boolean updateDraft = ds.getLatestVersion().isDraft(); DatasetVersion managedVersion; if (updateDraft) { From 0b3770efa9f8dbbf159f8eab5fce707940cacc95 Mon Sep 17 00:00:00 2001 From: qqmyers Date: Wed, 18 Jan 2023 16:40:32 -0500 Subject: [PATCH 0037/1525] Revert "more bugs re: semantic methods" This reverts commit 4cccef3f764f34ed7b6d0492e1e8b05acb03c96c. --- .../edu/harvard/iq/dataverse/api/Datasets.java | 14 ++++---------- 1 file changed, 4 insertions(+), 10 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java index e3282983b0d..73ea0b98cd1 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java @@ -733,11 +733,8 @@ public Response deleteMetadata(String jsonLDBody, @PathParam("id") String id) { try { Dataset ds = findDatasetOrDie(id); DataverseRequest req = createDataverseRequest(findUserOrDie()); - //Check if latest existing version is draft - boolean updateDraft = ds.getLatestVersion().isDraft(); - //Then get a draft version - the latest or a new one as needed DatasetVersion dsv = ds.getOrCreateEditVersion(); - + boolean updateDraft = ds.getLatestVersion().isDraft(); dsv = JSONLDUtil.deleteDatasetVersionMDFromJsonLD(dsv, jsonLDBody, metadataBlockService, licenseSvc); dsv.getTermsOfUseAndAccess().setDatasetVersion(dsv); DatasetVersion managedVersion; @@ -774,9 +771,6 @@ private Response processDatasetFieldDataDelete(String jsonBody, String id, Datav Dataset ds = findDatasetOrDie(id); JsonObject json = Json.createReader(rdr).readObject(); - //Check if latest existing version is draft - boolean updateDraft = ds.getLatestVersion().isDraft(); - //Then get a draft version - the latest or a new one as needed DatasetVersion dsv = ds.getOrCreateEditVersion(); dsv.getTermsOfUseAndAccess().setDatasetVersion(dsv); List fields = new LinkedList<>(); @@ -887,6 +881,8 @@ private Response processDatasetFieldDataDelete(String jsonBody, String id, Datav } } + + boolean updateDraft = ds.getLatestVersion().isDraft(); DatasetVersion managedVersion = updateDraft ? 
execCommand(new UpdateDatasetVersionCommand(ds, req)).getOrCreateEditVersion() : execCommand(new CreateDatasetVersionCommand(req, ds, dsv)); @@ -938,9 +934,6 @@ private Response processDatasetUpdate(String jsonBody, String id, DataverseReque Dataset ds = findDatasetOrDie(id); JsonObject json = Json.createReader(rdr).readObject(); - //Check if latest existing version is draft - boolean updateDraft = ds.getLatestVersion().isDraft(); - //Then get a draft version - the latest or a new one as needed DatasetVersion dsv = ds.getOrCreateEditVersion(); dsv.getTermsOfUseAndAccess().setDatasetVersion(dsv); List fields = new LinkedList<>(); @@ -1043,6 +1036,7 @@ private Response processDatasetUpdate(String jsonBody, String id, DataverseReque dsv.getDatasetFields().add(updateField); } } + boolean updateDraft = ds.getLatestVersion().isDraft(); DatasetVersion managedVersion; if (updateDraft) { From ef13bcdb3221dc54db1d2cff5630bb4f52d74c15 Mon Sep 17 00:00:00 2001 From: qqmyers Date: Wed, 18 Jan 2023 16:40:41 -0500 Subject: [PATCH 0038/1525] Revert "Bug fix in semantic api add method" This reverts commit bd49930b8936b4edfb15526705bf498a0f38b929. --- src/main/java/edu/harvard/iq/dataverse/api/Datasets.java | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java index 73ea0b98cd1..c3d262a20db 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java @@ -698,10 +698,8 @@ public Response updateVersionMetadata(String jsonLDBody, @PathParam("id") String try { Dataset ds = findDatasetOrDie(id); DataverseRequest req = createDataverseRequest(findUserOrDie()); - //Check if latest existing version is draft - boolean updateDraft = ds.getLatestVersion().isDraft(); - //Then get a draft version - the latest or a new one as needed DatasetVersion dsv = ds.getOrCreateEditVersion(); + boolean updateDraft = ds.getLatestVersion().isDraft(); dsv = JSONLDUtil.updateDatasetVersionMDFromJsonLD(dsv, jsonLDBody, metadataBlockService, datasetFieldSvc, !replaceTerms, false, licenseSvc); dsv.getTermsOfUseAndAccess().setDatasetVersion(dsv); boolean hasValidTerms = TermsOfUseAndAccessValidator.isTOUAValid(dsv.getTermsOfUseAndAccess(), null); From ef233e277c92bca3155a26c16671e0d9f71833c5 Mon Sep 17 00:00:00 2001 From: qqmyers Date: Thu, 19 Jan 2023 09:17:43 -0500 Subject: [PATCH 0039/1525] @Inject works, @EJB doesn't --- .../edu/harvard/iq/dataverse/DatasetVersionUI.java | 10 ++-------- 1 file changed, 2 insertions(+), 8 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetVersionUI.java b/src/main/java/edu/harvard/iq/dataverse/DatasetVersionUI.java index bea1a71a82c..f080f34c437 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetVersionUI.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetVersionUI.java @@ -6,23 +6,17 @@ package edu.harvard.iq.dataverse; import edu.harvard.iq.dataverse.util.MarkupChecker; -import edu.harvard.iq.dataverse.util.StringUtil; import java.io.Serializable; -import java.sql.Timestamp; -import java.text.SimpleDateFormat; import java.util.ArrayList; import java.util.Calendar; import java.util.Collections; import java.util.Comparator; import java.util.Date; -import java.util.HashMap; import java.util.List; -import java.util.Map; import java.util.TreeMap; -import static java.util.stream.Collectors.toList; import javax.ejb.EJB; import javax.faces.view.ViewScoped; -import 
javax.inject.Named; +import javax.inject.Inject; import javax.json.JsonObject; import javax.persistence.EntityManager; import javax.persistence.PersistenceContext; @@ -36,7 +30,7 @@ public class DatasetVersionUI implements Serializable { @EJB DataverseServiceBean dataverseService; - @EJB + @Inject SettingsWrapper settingsWrapper; @PersistenceContext(unitName = "VDCNet-ejbPU") From bb7cf314afd380e93c04a8bc64def9e8e5bcd3d6 Mon Sep 17 00:00:00 2001 From: qqmyers Date: Thu, 19 Jan 2023 09:19:19 -0500 Subject: [PATCH 0040/1525] Adding comment --- .../java/edu/harvard/iq/dataverse/util/json/JSONLDUtil.java | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/util/json/JSONLDUtil.java b/src/main/java/edu/harvard/iq/dataverse/util/json/JSONLDUtil.java index 127632bf711..90829e77b46 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/json/JSONLDUtil.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/json/JSONLDUtil.java @@ -348,7 +348,9 @@ public static DatasetVersion deleteDatasetVersionMDFromJsonLD(DatasetVersion dsv * @return null if exact match, otherwise return a field without the value to be deleted */ private static DatasetField getReplacementField(DatasetField dsf, JsonArray valArray) { - // TODO Auto-generated method stub + // TODO Parse valArray and remove any matching entries in the dsf + // Until then, delete removes all values of a multivalued field + // Doing this on a required field will fail. return null; } From a8981f3a2f4ce6bdd1aa35f9b6694b8ebfc282c2 Mon Sep 17 00:00:00 2001 From: qqmyers Date: Thu, 19 Jan 2023 09:20:16 -0500 Subject: [PATCH 0041/1525] add commented-out entry to microprofile properties --- src/main/resources/META-INF/microprofile-config.properties | 3 +++ 1 file changed, 3 insertions(+) diff --git a/src/main/resources/META-INF/microprofile-config.properties b/src/main/resources/META-INF/microprofile-config.properties index cde43fbff91..3befbe19b1c 100644 --- a/src/main/resources/META-INF/microprofile-config.properties +++ b/src/main/resources/META-INF/microprofile-config.properties @@ -37,3 +37,6 @@ dataverse.oai.server.maxsets=100 # the OAI repository name, as shown by the Identify verb, # can be customized via the setting below: #dataverse.oai.server.repositoryname= + +# METADATA SETTINGS +#dataverse.metadata.block-system-metadata-keys From 3f795af77b5c98123ea4f649606e4fed36701406 Mon Sep 17 00:00:00 2001 From: qqmyers Date: Thu, 19 Jan 2023 09:27:28 -0500 Subject: [PATCH 0042/1525] methods to identify changed md blocks --- .../dataverse/DatasetVersionDifference.java | 102 +++++++++++++++++- 1 file changed, 98 insertions(+), 4 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetVersionDifference.java b/src/main/java/edu/harvard/iq/dataverse/DatasetVersionDifference.java index e844a3f1ca8..1b0169f88ba 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetVersionDifference.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetVersionDifference.java @@ -2,28 +2,29 @@ import edu.harvard.iq.dataverse.datavariable.DataVariable; import edu.harvard.iq.dataverse.datavariable.VarGroup; -import edu.harvard.iq.dataverse.datavariable.VariableMetadata; import edu.harvard.iq.dataverse.datavariable.VariableMetadataUtil; import edu.harvard.iq.dataverse.util.StringUtil; import java.util.ArrayList; import java.util.Collections; -import java.util.Collection; import java.util.List; import java.util.Set; +import java.util.logging.Logger; import org.apache.commons.lang3.StringUtils; import 
edu.harvard.iq.dataverse.util.BundleUtil; -import edu.harvard.iq.dataverse.util.FileUtil; - import java.util.Arrays; import java.util.Date; +import java.util.HashSet; +import java.util.Iterator; +import java.util.LinkedList; /** * * @author skraffmiller */ public final class DatasetVersionDifference { + private static final Logger logger = Logger.getLogger(DatasetVersionDifference.class.getCanonicalName()); private DatasetVersion newVersion; private DatasetVersion originalVersion; @@ -1713,4 +1714,97 @@ public void setDatasetFilesDiffList(List datasetFiles this.datasetFilesDiffList = datasetFilesDiffList; } + /* + * Static methods to compute which blocks have changes between the two + * DatasetVersions. Currently used to assess whether 'system metadatablocks' + * (protected by a separate key) have changed. (Simplified from the methods + * above that track all the individual changes) + * + */ + public static Set getBlocksWithChanges(DatasetVersion newVersion, DatasetVersion originalVersion) { + Set changedBlockSet = new HashSet(); + + //Compare Data + List newDatasetFields = new LinkedList(newVersion.getDatasetFields()); + List originalDatasetFields = new LinkedList(originalVersion.getDatasetFields()); + Iterator dsfoIter = originalDatasetFields.listIterator(); + while(dsfoIter.hasNext()) { + DatasetField dsfo = dsfoIter.next(); + boolean deleted = true; + Iterator dsfnIter = newDatasetFields.listIterator(); + + while (dsfnIter.hasNext()) { + DatasetField dsfn = dsfnIter.next(); + if (dsfo.getDatasetFieldType().equals(dsfn.getDatasetFieldType())) { + deleted = false; + if (!changedBlockSet.contains(dsfo.getDatasetFieldType().getMetadataBlock())) { + logger.fine("Checking " + dsfo.getDatasetFieldType().getName()); + if (dsfo.getDatasetFieldType().isPrimitive()) { + if (fieldsAreDifferent(dsfo, dsfn, false)) { + logger.fine("Adding block for " + dsfo.getDatasetFieldType().getName()); + changedBlockSet.add(dsfo.getDatasetFieldType().getMetadataBlock()); + } + } else { + if (fieldsAreDifferent(dsfo, dsfn, true)) { + logger.fine("Adding block for " + dsfo.getDatasetFieldType().getName()); + changedBlockSet.add(dsfo.getDatasetFieldType().getMetadataBlock()); + } + } + } + dsfnIter.remove(); + break; // if found go to next dataset field + } + } + + if (deleted) { + logger.fine("Adding block for deleted " + dsfo.getDatasetFieldType().getName()); + changedBlockSet.add(dsfo.getDatasetFieldType().getMetadataBlock()); + } + dsfoIter.remove(); + } + //Only fields left are non-matching ones but they may be empty + for (DatasetField dsfn : newDatasetFields) { + if(!dsfn.isEmpty()) { + logger.fine("Adding block for added " + dsfn.getDatasetFieldType().getName()); + changedBlockSet.add(dsfn.getDatasetFieldType().getMetadataBlock()); + } + } + return changedBlockSet; + } + + private static boolean fieldsAreDifferent(DatasetField originalField, DatasetField newField, boolean compound) { + String originalValue = ""; + String newValue = ""; + + if (compound) { + for (DatasetFieldCompoundValue datasetFieldCompoundValueOriginal : originalField + .getDatasetFieldCompoundValues()) { + int loopIndex = 0; + if (newField.getDatasetFieldCompoundValues().size() >= loopIndex + 1) { + for (DatasetField dsfo : datasetFieldCompoundValueOriginal.getChildDatasetFields()) { + if (!dsfo.getDisplayValue().isEmpty()) { + originalValue += dsfo.getDisplayValue() + ", "; + } + } + for (DatasetField dsfn : newField.getDatasetFieldCompoundValues().get(loopIndex) + .getChildDatasetFields()) { + if (!dsfn.getDisplayValue().isEmpty()) { 
+ newValue += dsfn.getDisplayValue() + ", "; + } + } + if (!originalValue.trim().equals(newValue.trim())) { + return true; + } + } + loopIndex++; + } + } else { + originalValue = originalField.getDisplayValue(); + newValue = newField.getDisplayValue(); + if (!originalValue.equalsIgnoreCase(newValue)) { + return true; + } + } + return false; + } } From 4422940a678130dac8a7fdf6c4859242383c41ef Mon Sep 17 00:00:00 2001 From: qqmyers Date: Thu, 19 Jan 2023 09:29:17 -0500 Subject: [PATCH 0043/1525] implementation of system md block checks in Update/Create dsv commands --- .../dataverse/MetadataBlockServiceBean.java | 2 +- .../harvard/iq/dataverse/SettingsWrapper.java | 2 +- .../engine/command/DataverseRequest.java | 11 +++++-- .../command/impl/AbstractDatasetCommand.java | 31 +++++++++++++++++++ .../impl/CreateDatasetVersionCommand.java | 9 ++++++ .../impl/UpdateDatasetVersionCommand.java | 23 ++++++++++++-- 6 files changed, 72 insertions(+), 6 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/MetadataBlockServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/MetadataBlockServiceBean.java index a25480102f2..d8a0e5fb56f 100644 --- a/src/main/java/edu/harvard/iq/dataverse/MetadataBlockServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/MetadataBlockServiceBean.java @@ -18,7 +18,7 @@ public class MetadataBlockServiceBean { @PersistenceContext(unitName = "VDCNet-ejbPU") private EntityManager em; - + public static final String SYSTEM_MD_KEY="mdkey"; public MetadataBlock save(MetadataBlock mdb) { return em.merge(mdb); diff --git a/src/main/java/edu/harvard/iq/dataverse/SettingsWrapper.java b/src/main/java/edu/harvard/iq/dataverse/SettingsWrapper.java index ec6ad24a9c6..5ee8dd53def 100644 --- a/src/main/java/edu/harvard/iq/dataverse/SettingsWrapper.java +++ b/src/main/java/edu/harvard/iq/dataverse/SettingsWrapper.java @@ -707,7 +707,7 @@ public boolean isCustomLicenseAllowed() { public JsonObject getSystemMetadataBlocks() { if (systemMetadataBlocks == null) { - String smdbString = JvmSettings.METADATA_BLOCK_SYSTEM_METADATA_KEYS.lookup(); + String smdbString = JvmSettings.METADATA_BLOCK_SYSTEM_METADATA_KEYS.lookupOptional().orElse(null); if (smdbString != null) { systemMetadataBlocks = JsonUtil.getJsonObject(smdbString); } diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/DataverseRequest.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/DataverseRequest.java index f0cba005a4a..d0e5ce04002 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/DataverseRequest.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/DataverseRequest.java @@ -24,6 +24,7 @@ public class DataverseRequest { private final User user; private final IpAddress sourceAddress; private final String invocationId; + private final HttpServletRequest httpServletRequest; private final static String undefined = "0.0.0.0"; @@ -55,11 +56,12 @@ public class DataverseRequest { public DataverseRequest(User aUser, HttpServletRequest aHttpServletRequest) { this.user = aUser; - + httpServletRequest = aHttpServletRequest; + IpAddress address = null; if (aHttpServletRequest != null) { - + if (headerToUse != null) { /* * The optional case of using a header to determine the IP address is discussed @@ -151,6 +153,7 @@ public DataverseRequest( User aUser, IpAddress aSourceAddress ) { user = aUser; sourceAddress = aSourceAddress; invocationId=null; + httpServletRequest=null; } public User getUser() { @@ -187,4 +190,8 @@ public String getWFInvocationId() { return invocationId; 
} + public HttpServletRequest getHttpServletRequest() { + return httpServletRequest; + } + } diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/AbstractDatasetCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/AbstractDatasetCommand.java index f3b75d23c63..c14a0392fd4 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/AbstractDatasetCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/AbstractDatasetCommand.java @@ -3,6 +3,7 @@ import edu.harvard.iq.dataverse.Dataset; import edu.harvard.iq.dataverse.DatasetField; import edu.harvard.iq.dataverse.DatasetVersion; +import edu.harvard.iq.dataverse.DatasetVersionDifference; import edu.harvard.iq.dataverse.DatasetVersionUser; import edu.harvard.iq.dataverse.Dataverse; import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser; @@ -13,16 +14,24 @@ import edu.harvard.iq.dataverse.engine.command.exception.CommandExecutionException; import edu.harvard.iq.dataverse.engine.command.exception.IllegalCommandException; import edu.harvard.iq.dataverse.util.BundleUtil; +import edu.harvard.iq.dataverse.util.json.JsonUtil; + import java.sql.Timestamp; import java.util.Date; import java.util.Set; import java.util.logging.Level; import java.util.logging.Logger; import static java.util.stream.Collectors.joining; + +import javax.json.JsonObject; +import javax.servlet.http.HttpServletRequest; import javax.validation.ConstraintViolation; import edu.harvard.iq.dataverse.GlobalIdServiceBean; +import edu.harvard.iq.dataverse.MetadataBlock; +import edu.harvard.iq.dataverse.MetadataBlockServiceBean; import edu.harvard.iq.dataverse.TermsOfUseAndAccess; import edu.harvard.iq.dataverse.pidproviders.FakePidProviderServiceBean; +import edu.harvard.iq.dataverse.settings.JvmSettings; /** * @@ -212,4 +221,26 @@ public void setDataset(Dataset dataset) { protected Timestamp getTimestamp() { return timestamp; } + + protected void checkSystemMetadataKeyIfNeeded(DatasetVersion newVersion, DatasetVersion persistedVersion) throws IllegalCommandException { + Set changedMDBs = DatasetVersionDifference.getBlocksWithChanges(newVersion, persistedVersion); + changedMDBs.forEach(mdb -> {logger.fine(mdb.getName() + " has been changed");}); + + String smdbString = JvmSettings.METADATA_BLOCK_SYSTEM_METADATA_KEYS.lookupOptional().orElse(null); + if (smdbString != null) { + JsonObject systemMetadataBlocks = JsonUtil.getJsonObject(smdbString); + HttpServletRequest httpServletRequest = getRequest().getHttpServletRequest(); + if (httpServletRequest != null) { + String mdKey = httpServletRequest.getParameter(MetadataBlockServiceBean.SYSTEM_MD_KEY); + for (MetadataBlock mdb : changedMDBs) { + if (systemMetadataBlocks.containsKey(mdb.getName())) { + if (mdKey==null || !mdKey.equals(systemMetadataBlocks.getString(mdb.getName()))) { + throw new IllegalCommandException("Updating system metadata requires a key", this); + } + } + } + } + } + + } } diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateDatasetVersionCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateDatasetVersionCommand.java index 72439d4ba4a..5794ab01101 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateDatasetVersionCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateDatasetVersionCommand.java @@ -36,6 +36,11 @@ public CreateDatasetVersionCommand(DataverseRequest aRequest, Dataset theDataset @Override public DatasetVersion 
execute(CommandContext ctxt) throws CommandException {
+        /*
+         * CreateDatasetVersionCommand assumes you have not added your new version to
+         * the dataset you send. Use UpdateDatasetVersionCommand if you created the new
+         * version via Dataset.getOrCreateEditVersion() and just want to persist it.
+         */
         DatasetVersion latest = dataset.getLatestVersion();
         if ( latest.isWorkingCopy() ) {
             // A dataset can only have a single draft, which has to be the latest.
@@ -44,6 +49,10 @@ public DatasetVersion execute(CommandContext ctxt) throws CommandException {
                 throw new IllegalCommandException("Latest version is already a draft. Cannot add another draft", this);
             }
         }
+
+        //Will throw an IllegalCommandException if a system metadatablock is changed and the appropriate key is not supplied.
+        checkSystemMetadataKeyIfNeeded(newVersion, latest);
+
         List<FileMetadata> newVersionMetadatum = new ArrayList<>(latest.getFileMetadatas().size());
         for ( FileMetadata fmd : latest.getFileMetadatas() ) {

diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateDatasetVersionCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateDatasetVersionCommand.java
index 33f64f23076..25706574c9a 100644
--- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateDatasetVersionCommand.java
+++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateDatasetVersionCommand.java
@@ -9,6 +9,7 @@
 import edu.harvard.iq.dataverse.engine.command.RequiredPermissions;
 import edu.harvard.iq.dataverse.engine.command.exception.CommandException;
 import edu.harvard.iq.dataverse.engine.command.exception.IllegalCommandException;
+import edu.harvard.iq.dataverse.settings.SettingsServiceBean;
 import edu.harvard.iq.dataverse.util.DatasetFieldUtil;
 import edu.harvard.iq.dataverse.util.FileMetadataUtil;
 
@@ -30,11 +31,11 @@
 @RequiredPermissions(Permission.EditDataset)
 public class UpdateDatasetVersionCommand extends AbstractDatasetCommand<Dataset> {
 
-    private static final Logger logger = Logger.getLogger(UpdateDatasetVersionCommand.class.getCanonicalName());
+    static final Logger logger = Logger.getLogger(UpdateDatasetVersionCommand.class.getCanonicalName());
     private final List<FileMetadata> filesToDelete;
     private boolean validateLenient = false;
     private final DatasetVersion clone;
-    private final FileMetadata fmVarMet;
+    final FileMetadata fmVarMet;
 
     public UpdateDatasetVersionCommand(Dataset theDataset, DataverseRequest aRequest) {
         super(aRequest, theDataset);
@@ -104,6 +105,24 @@ public Dataset execute(CommandContext ctxt) throws CommandException {
         ctxt.permissions().checkEditDatasetLock(theDataset, getRequest(), this);
         Dataset savedDataset = null;
+
+        DatasetVersion persistedVersion = clone;
+        /*
+         * Unless a pre-change clone has been provided, we need to get it from the db.
+         * There are two cases: we're updating an existing draft, which has an id and
+         * exists in the database; or we've created a new draft, with null id, and we
+         * need to get the latest version in the db
+         *
+         */
+        if(persistedVersion==null) {
+            logger.info("No Clone");
+            Long id = getDataset().getLatestVersion().getId();
+            persistedVersion = ctxt.datasetVersion().find(id!=null ? id: getDataset().getLatestVersionForCopy().getId());
+        }
+
+        //Will throw an IllegalCommandException if a system metadatablock is changed and the appropriate key is not supplied.
+ checkSystemMetadataKeyIfNeeded(getDataset().getOrCreateEditVersion(fmVarMet), persistedVersion); + + try { // Invariant: Dataset has no locks preventing the update String lockInfoMessage = "saving current edits"; From 3e36562b435e38656ffc7aad172b82cad240a991 Mon Sep 17 00:00:00 2001 From: qqmyers Date: Thu, 19 Jan 2023 09:32:56 -0500 Subject: [PATCH 0044/1525] correcting/simplifying sem api methods these methods were always using the update command (because they checked for isDraft after always creating a draft version). However, since the Create command assumes a new dsv not associated with the dataset yet, they also shouldn't use it. This commit reorders the isDraft check where needed and removes the logic to decide whether to use the Update or Create commands (since Update should always be used). This should be a big no-op since the old code always used Update 'accidentally'). --- .../harvard/iq/dataverse/api/Datasets.java | 43 ++++++------------- 1 file changed, 14 insertions(+), 29 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java index c3d262a20db..f6639fb711b 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java @@ -647,9 +647,6 @@ public Response updateDraftVersion( String jsonBody, @PathParam("id") String id, } managedVersion = execCommand(new CreateDatasetVersionCommand(req, ds, incomingVersion)); } -// DatasetVersion managedVersion = execCommand( updateDraft -// ? new UpdateDatasetVersionCommand(req, incomingVersion) -// : new CreateDatasetVersionCommand(req, ds, incomingVersion)); return ok( json(managedVersion) ); } catch (JsonParseException ex) { @@ -698,8 +695,10 @@ public Response updateVersionMetadata(String jsonLDBody, @PathParam("id") String try { Dataset ds = findDatasetOrDie(id); DataverseRequest req = createDataverseRequest(findUserOrDie()); - DatasetVersion dsv = ds.getOrCreateEditVersion(); + //Get draft state as of now boolean updateDraft = ds.getLatestVersion().isDraft(); + //Get the current draft or create a new version to update + DatasetVersion dsv = ds.getOrCreateEditVersion(); dsv = JSONLDUtil.updateDatasetVersionMDFromJsonLD(dsv, jsonLDBody, metadataBlockService, datasetFieldSvc, !replaceTerms, false, licenseSvc); dsv.getTermsOfUseAndAccess().setDatasetVersion(dsv); boolean hasValidTerms = TermsOfUseAndAccessValidator.isTOUAValid(dsv.getTermsOfUseAndAccess(), null); @@ -707,12 +706,8 @@ public Response updateVersionMetadata(String jsonLDBody, @PathParam("id") String return error(Status.CONFLICT, BundleUtil.getStringFromBundle("dataset.message.toua.invalid")); } DatasetVersion managedVersion; - if (updateDraft) { - Dataset managedDataset = execCommand(new UpdateDatasetVersionCommand(ds, req)); - managedVersion = managedDataset.getOrCreateEditVersion(); - } else { - managedVersion = execCommand(new CreateDatasetVersionCommand(req, ds, dsv)); - } + Dataset managedDataset = execCommand(new UpdateDatasetVersionCommand(ds, req)); + managedVersion = managedDataset.getLatestVersion(); String info = updateDraft ? 
"Version Updated" : "Version Created"; return ok(Json.createObjectBuilder().add(info, managedVersion.getVersionDate())); @@ -731,17 +726,15 @@ public Response deleteMetadata(String jsonLDBody, @PathParam("id") String id) { try { Dataset ds = findDatasetOrDie(id); DataverseRequest req = createDataverseRequest(findUserOrDie()); - DatasetVersion dsv = ds.getOrCreateEditVersion(); + //Get draft state as of now boolean updateDraft = ds.getLatestVersion().isDraft(); + //Get the current draft or create a new version to update + DatasetVersion dsv = ds.getOrCreateEditVersion(); dsv = JSONLDUtil.deleteDatasetVersionMDFromJsonLD(dsv, jsonLDBody, metadataBlockService, licenseSvc); dsv.getTermsOfUseAndAccess().setDatasetVersion(dsv); DatasetVersion managedVersion; - if (updateDraft) { - Dataset managedDataset = execCommand(new UpdateDatasetVersionCommand(ds, req)); - managedVersion = managedDataset.getOrCreateEditVersion(); - } else { - managedVersion = execCommand(new CreateDatasetVersionCommand(req, ds, dsv)); - } + Dataset managedDataset = execCommand(new UpdateDatasetVersionCommand(ds, req)); + managedVersion = managedDataset.getLatestVersion(); String info = updateDraft ? "Version Updated" : "Version Created"; return ok(Json.createObjectBuilder().add(info, managedVersion.getVersionDate())); @@ -769,6 +762,7 @@ private Response processDatasetFieldDataDelete(String jsonBody, String id, Datav Dataset ds = findDatasetOrDie(id); JsonObject json = Json.createReader(rdr).readObject(); + //Get the current draft or create a new version to update DatasetVersion dsv = ds.getOrCreateEditVersion(); dsv.getTermsOfUseAndAccess().setDatasetVersion(dsv); List fields = new LinkedList<>(); @@ -880,10 +874,7 @@ private Response processDatasetFieldDataDelete(String jsonBody, String id, Datav } - boolean updateDraft = ds.getLatestVersion().isDraft(); - DatasetVersion managedVersion = updateDraft - ? 
execCommand(new UpdateDatasetVersionCommand(ds, req)).getOrCreateEditVersion() - : execCommand(new CreateDatasetVersionCommand(req, ds, dsv)); + DatasetVersion managedVersion = execCommand(new UpdateDatasetVersionCommand(ds, req)).getLatestVersion(); return ok(json(managedVersion)); } catch (JsonParseException ex) { @@ -932,6 +923,7 @@ private Response processDatasetUpdate(String jsonBody, String id, DataverseReque Dataset ds = findDatasetOrDie(id); JsonObject json = Json.createReader(rdr).readObject(); + //Get the current draft or create a new version to update DatasetVersion dsv = ds.getOrCreateEditVersion(); dsv.getTermsOfUseAndAccess().setDatasetVersion(dsv); List fields = new LinkedList<>(); @@ -1034,14 +1026,7 @@ private Response processDatasetUpdate(String jsonBody, String id, DataverseReque dsv.getDatasetFields().add(updateField); } } - boolean updateDraft = ds.getLatestVersion().isDraft(); - DatasetVersion managedVersion; - - if (updateDraft) { - managedVersion = execCommand(new UpdateDatasetVersionCommand(ds, req)).getOrCreateEditVersion(); - } else { - managedVersion = execCommand(new CreateDatasetVersionCommand(req, ds, dsv)); - } + DatasetVersion managedVersion = execCommand(new UpdateDatasetVersionCommand(ds, req)).getLatestVersion(); return ok(json(managedVersion)); From eafaa7707123e57a90059824674b0668c6d941c3 Mon Sep 17 00:00:00 2001 From: qqmyers Date: Mon, 30 Jan 2023 15:57:28 -0500 Subject: [PATCH 0045/1525] remove always false flag, add harvested flag --- .../java/edu/harvard/iq/dataverse/DatasetPage.java | 2 +- .../command/impl/AbstractCreateDatasetCommand.java | 10 +++------- .../command/impl/CreateHarvestedDatasetCommand.java | 2 +- .../engine/command/impl/CreateNewDatasetCommand.java | 10 +++------- 4 files changed, 8 insertions(+), 16 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java b/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java index 429a0d7a4e4..73f86453993 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java @@ -3595,7 +3595,7 @@ public String save() { //ToDo - could drop use of selectedTemplate and just use the persistent dataset.getTemplate() if ( selectedTemplate != null ) { if ( isSessionUserAuthenticated() ) { - cmd = new CreateNewDatasetCommand(dataset, dvRequestService.getDataverseRequest(), false, selectedTemplate); + cmd = new CreateNewDatasetCommand(dataset, dvRequestService.getDataverseRequest(), selectedTemplate); } else { JH.addMessage(FacesMessage.SEVERITY_FATAL, BundleUtil.getStringFromBundle("dataset.create.authenticatedUsersOnly")); return null; diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/AbstractCreateDatasetCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/AbstractCreateDatasetCommand.java index 1465cbd74e2..83b74afeaeb 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/AbstractCreateDatasetCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/AbstractCreateDatasetCommand.java @@ -30,15 +30,15 @@ public abstract class AbstractCreateDatasetCommand extends AbstractDatasetComman private static final Logger logger = Logger.getLogger(AbstractCreateDatasetCommand.class.getCanonicalName()); - final protected boolean registrationRequired; + final protected boolean harvested; public AbstractCreateDatasetCommand(Dataset theDataset, DataverseRequest aRequest) { this(theDataset, aRequest, false); } - public 
AbstractCreateDatasetCommand(Dataset theDataset, DataverseRequest aRequest, boolean isRegistrationRequired) { + public AbstractCreateDatasetCommand(Dataset theDataset, DataverseRequest aRequest, boolean isHarvested) { super(aRequest, theDataset); - registrationRequired = isRegistrationRequired; + harvested=isHarvested; } protected void additionalParameterTests(CommandContext ctxt) throws CommandException { @@ -110,10 +110,6 @@ public Dataset execute(CommandContext ctxt) throws CommandException { // Attempt the registration if importing dataset through the API, or the app (but not harvest) handlePid(theDataset, ctxt); - - if (registrationRequired && (theDataset.getGlobalIdCreateTime() == null)) { - throw new CommandExecutionException("Dataset could not be created. Registration failed", this); - } ctxt.em().persist(theDataset); diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateHarvestedDatasetCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateHarvestedDatasetCommand.java index 01bcdca1238..78fe519e53d 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateHarvestedDatasetCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateHarvestedDatasetCommand.java @@ -16,7 +16,7 @@ public class CreateHarvestedDatasetCommand extends AbstractCreateDatasetCommand { public CreateHarvestedDatasetCommand(Dataset theDataset, DataverseRequest aRequest) { - super(theDataset, aRequest); + super(theDataset, aRequest, true); } @Override diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateNewDatasetCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateNewDatasetCommand.java index 1efaf14c755..382c3acc005 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateNewDatasetCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateNewDatasetCommand.java @@ -49,15 +49,11 @@ public class CreateNewDatasetCommand extends AbstractCreateDatasetCommand { private final Dataverse dv; public CreateNewDatasetCommand(Dataset theDataset, DataverseRequest aRequest) { - this( theDataset, aRequest, false); + this( theDataset, aRequest, null); } - public CreateNewDatasetCommand(Dataset theDataset, DataverseRequest aRequest, boolean registrationRequired) { - this( theDataset, aRequest, registrationRequired, null); - } - - public CreateNewDatasetCommand(Dataset theDataset, DataverseRequest aRequest, boolean registrationRequired, Template template) { - super(theDataset, aRequest, registrationRequired); + public CreateNewDatasetCommand(Dataset theDataset, DataverseRequest aRequest, Template template) { + super(theDataset, aRequest); this.template = template; dv = theDataset.getOwner(); } From e368c8a77d5f6addc86ef35fe23248a941966e5d Mon Sep 17 00:00:00 2001 From: qqmyers Date: Mon, 30 Jan 2023 16:06:11 -0500 Subject: [PATCH 0046/1525] Check for system md in create, except for harvested --- .../dataverse/DatasetVersionDifference.java | 80 +++++++++++-------- .../impl/AbstractCreateDatasetCommand.java | 4 + 2 files changed, 50 insertions(+), 34 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetVersionDifference.java b/src/main/java/edu/harvard/iq/dataverse/DatasetVersionDifference.java index 1b0169f88ba..eca0c84ae84 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetVersionDifference.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetVersionDifference.java @@ -1723,50 +1723,62 @@ public void 
setDatasetFilesDiffList(List datasetFiles */ public static Set getBlocksWithChanges(DatasetVersion newVersion, DatasetVersion originalVersion) { Set changedBlockSet = new HashSet(); - - //Compare Data + + // Compare Data List newDatasetFields = new LinkedList(newVersion.getDatasetFields()); - List originalDatasetFields = new LinkedList(originalVersion.getDatasetFields()); - Iterator dsfoIter = originalDatasetFields.listIterator(); - while(dsfoIter.hasNext()) { - DatasetField dsfo = dsfoIter.next(); - boolean deleted = true; + if (originalVersion == null) { + // Every field is new, just list blocks used Iterator dsfnIter = newDatasetFields.listIterator(); - while (dsfnIter.hasNext()) { DatasetField dsfn = dsfnIter.next(); - if (dsfo.getDatasetFieldType().equals(dsfn.getDatasetFieldType())) { - deleted = false; - if (!changedBlockSet.contains(dsfo.getDatasetFieldType().getMetadataBlock())) { - logger.fine("Checking " + dsfo.getDatasetFieldType().getName()); - if (dsfo.getDatasetFieldType().isPrimitive()) { - if (fieldsAreDifferent(dsfo, dsfn, false)) { - logger.fine("Adding block for " + dsfo.getDatasetFieldType().getName()); - changedBlockSet.add(dsfo.getDatasetFieldType().getMetadataBlock()); - } - } else { - if (fieldsAreDifferent(dsfo, dsfn, true)) { - logger.fine("Adding block for " + dsfo.getDatasetFieldType().getName()); - changedBlockSet.add(dsfo.getDatasetFieldType().getMetadataBlock()); + if (!changedBlockSet.contains(dsfn.getDatasetFieldType().getMetadataBlock())) { + changedBlockSet.add(dsfn.getDatasetFieldType().getMetadataBlock()); + } + } + + } else { + List originalDatasetFields = new LinkedList(originalVersion.getDatasetFields()); + Iterator dsfoIter = originalDatasetFields.listIterator(); + while (dsfoIter.hasNext()) { + DatasetField dsfo = dsfoIter.next(); + boolean deleted = true; + Iterator dsfnIter = newDatasetFields.listIterator(); + + while (dsfnIter.hasNext()) { + DatasetField dsfn = dsfnIter.next(); + if (dsfo.getDatasetFieldType().equals(dsfn.getDatasetFieldType())) { + deleted = false; + if (!changedBlockSet.contains(dsfo.getDatasetFieldType().getMetadataBlock())) { + logger.fine("Checking " + dsfo.getDatasetFieldType().getName()); + if (dsfo.getDatasetFieldType().isPrimitive()) { + if (fieldsAreDifferent(dsfo, dsfn, false)) { + logger.fine("Adding block for " + dsfo.getDatasetFieldType().getName()); + changedBlockSet.add(dsfo.getDatasetFieldType().getMetadataBlock()); + } + } else { + if (fieldsAreDifferent(dsfo, dsfn, true)) { + logger.fine("Adding block for " + dsfo.getDatasetFieldType().getName()); + changedBlockSet.add(dsfo.getDatasetFieldType().getMetadataBlock()); + } } } + dsfnIter.remove(); + break; // if found go to next dataset field } - dsfnIter.remove(); - break; // if found go to next dataset field } - } - if (deleted) { - logger.fine("Adding block for deleted " + dsfo.getDatasetFieldType().getName()); - changedBlockSet.add(dsfo.getDatasetFieldType().getMetadataBlock()); + if (deleted) { + logger.fine("Adding block for deleted " + dsfo.getDatasetFieldType().getName()); + changedBlockSet.add(dsfo.getDatasetFieldType().getMetadataBlock()); + } + dsfoIter.remove(); } - dsfoIter.remove(); - } - //Only fields left are non-matching ones but they may be empty - for (DatasetField dsfn : newDatasetFields) { - if(!dsfn.isEmpty()) { - logger.fine("Adding block for added " + dsfn.getDatasetFieldType().getName()); - changedBlockSet.add(dsfn.getDatasetFieldType().getMetadataBlock()); + // Only fields left are non-matching ones but they may be empty + for 
(DatasetField dsfn : newDatasetFields) { + if (!dsfn.isEmpty()) { + logger.fine("Adding block for added " + dsfn.getDatasetFieldType().getName()); + changedBlockSet.add(dsfn.getDatasetFieldType().getMetadataBlock()); + } } } return changedBlockSet; diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/AbstractCreateDatasetCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/AbstractCreateDatasetCommand.java index 83b74afeaeb..1d900cb7234 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/AbstractCreateDatasetCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/AbstractCreateDatasetCommand.java @@ -83,6 +83,10 @@ public Dataset execute(CommandContext ctxt) throws CommandException { // without persisting the new version, or altering its files. new CreateDatasetVersionCommand(getRequest(), theDataset, dsv).prepareDatasetAndVersion(); + if(!harvested) { + checkSystemMetadataKeyIfNeeded(dsv, null); + } + theDataset.setCreator((AuthenticatedUser) getRequest().getUser()); theDataset.setCreateDate(getTimestamp()); From 11cea54b1689693a1b981902ab9bcd15df2c73e9 Mon Sep 17 00:00:00 2001 From: qqmyers Date: Mon, 30 Jan 2023 16:34:24 -0500 Subject: [PATCH 0047/1525] add MetadataBlock to test DatasetFieldType needed for system Metadata check in CreateDatasetVersionCommand. --- src/test/java/edu/harvard/iq/dataverse/mocks/MocksFactory.java | 3 +++ 1 file changed, 3 insertions(+) diff --git a/src/test/java/edu/harvard/iq/dataverse/mocks/MocksFactory.java b/src/test/java/edu/harvard/iq/dataverse/mocks/MocksFactory.java index cc4740e564c..7d6e663a547 100644 --- a/src/test/java/edu/harvard/iq/dataverse/mocks/MocksFactory.java +++ b/src/test/java/edu/harvard/iq/dataverse/mocks/MocksFactory.java @@ -194,6 +194,9 @@ public static DatasetFieldType makeDatasetFieldType() { final Long id = nextId(); DatasetFieldType retVal = new DatasetFieldType("SampleType-"+id, FieldType.TEXT, false); retVal.setId(id); + MetadataBlock mdb = new MetadataBlock(); + mdb.setName("Test"); + retVal.setMetadataBlock(mdb); return retVal; } From a36ab7820ac52e6d3333e05464072645c82c2306 Mon Sep 17 00:00:00 2001 From: qqmyers Date: Tue, 14 Feb 2023 17:10:11 -0500 Subject: [PATCH 0048/1525] Fix checkstyle issue --- .../dataverse/engine/command/impl/AbstractDatasetCommand.java | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/AbstractDatasetCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/AbstractDatasetCommand.java index c14a0392fd4..391f0d1d1a5 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/AbstractDatasetCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/AbstractDatasetCommand.java @@ -224,7 +224,9 @@ protected Timestamp getTimestamp() { protected void checkSystemMetadataKeyIfNeeded(DatasetVersion newVersion, DatasetVersion persistedVersion) throws IllegalCommandException { Set changedMDBs = DatasetVersionDifference.getBlocksWithChanges(newVersion, persistedVersion); - changedMDBs.forEach(mdb -> {logger.fine(mdb.getName() + " has been changed");}); + changedMDBs.forEach(mdb -> { + logger.fine(mdb.getName() + " has been changed"); + }); String smdbString = JvmSettings.METADATA_BLOCK_SYSTEM_METADATA_KEYS.lookupOptional().orElse(null); if (smdbString != null) { From 4d7df9c4abfdebe4b8d19382fd836ff9827f5053 Mon Sep 17 00:00:00 2001 From: Leonid Andreev Date: Thu, 23 Feb 2023 11:13:17 -0500 Subject: [PATCH 
0049/1525] (draft/work in progress) framework for a new file creation command and storage quota enforcement #9361. --- .../iq/dataverse/DataFileServiceBean.java | 57 ++ .../iq/dataverse/DatasetServiceBean.java | 2 +- .../iq/dataverse/EditDatafilesPage.java | 30 +- .../impl/CreateNewDataFilesCommand.java | 555 ++++++++++++++++++ .../settings/SettingsServiceBean.java | 10 +- .../harvard/iq/dataverse/util/FileUtil.java | 36 +- .../iq/dataverse/util/SystemConfig.java | 16 +- .../util/bagit/data/FileUtilWrapper.java | 7 +- .../FileExceedsStorageQuotaException.java | 22 + src/main/java/propertyFiles/Bundle.properties | 1 + src/main/webapp/editFilesFragment.xhtml | 1 + 11 files changed, 718 insertions(+), 19 deletions(-) create mode 100644 src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateNewDataFilesCommand.java create mode 100644 src/main/java/edu/harvard/iq/dataverse/util/file/FileExceedsStorageQuotaException.java diff --git a/src/main/java/edu/harvard/iq/dataverse/DataFileServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DataFileServiceBean.java index 7da06f36be4..328f2aa59c0 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DataFileServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/DataFileServiceBean.java @@ -67,6 +67,8 @@ public class DataFileServiceBean implements java.io.Serializable { @EJB EmbargoServiceBean embargoService; + @EJB SystemConfig systemConfig; + @PersistenceContext(unitName = "VDCNet-ejbPU") private EntityManager em; @@ -140,6 +142,36 @@ public class DataFileServiceBean implements java.io.Serializable { */ public static final String MIME_TYPE_PACKAGE_FILE = "application/vnd.dataverse.file-package"; + public class UserStorageQuota { + private Long totalAllocatedInBytes = 0L; + private Long totalUsageInBytes = 0L; + + public UserStorageQuota(Long allocated, Long used) { + this.totalAllocatedInBytes = allocated; + this.totalUsageInBytes = used; + } + + public Long getTotalAllocatedInBytes() { + return totalAllocatedInBytes; + } + + public void setTotalAllocatedInBytes(Long totalAllocatedInBytes) { + this.totalAllocatedInBytes = totalAllocatedInBytes; + } + + public Long getTotalUsageInBytes() { + return totalUsageInBytes; + } + + public void setTotalUsageInBytes(Long totalUsageInBytes) { + this.totalUsageInBytes = totalUsageInBytes; + } + + public Long getRemainingQuotaInBytes() { + return totalAllocatedInBytes - totalUsageInBytes; + } + } + public DataFile find(Object pk) { return em.find(DataFile.class, pk); } @@ -1657,4 +1689,29 @@ public Embargo findEmbargo(Long id) { DataFile d = find(id); return d.getEmbargo(); } + + public Long getStorageUsageByCreator(AuthenticatedUser user) { + Query query = em.createQuery("SELECT SUM(o.filesize) FROM DataFile o WHERE o.creator.id=:creatorId"); + + try { + Long totalSize = (Long)query.setParameter("creatorId", user.getId()).getSingleResult(); + logger.info("total size for user: "+totalSize); + return totalSize == null ? 0L : totalSize; + } catch (NoResultException nre) { // ? 
+ logger.info("NoResultException, returning 0L"); + return 0L; + } + } + + public UserStorageQuota getUserStorageQuota(AuthenticatedUser user, Dataset dataset) { + // this is for testing only - one pre-set, installation-wide quota limit + // for everybody: + Long totalAllocated = systemConfig.getTestStorageQuotaLimit(); + // again, this is for testing only - we are only counting the total size + // of all the files created by this user; it will likely be a much more + // complex calculation in real life applications: + Long totalUsed = getStorageUsageByCreator(user); + + return new UserStorageQuota(totalAllocated, totalUsed); + } } diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DatasetServiceBean.java index 91ec050fe5c..4e522bbd441 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetServiceBean.java @@ -1166,5 +1166,5 @@ public void deleteHarvestedDataset(Dataset dataset, DataverseRequest request, Lo hdLogger.warning("Failed to destroy the dataset"); } } - + } diff --git a/src/main/java/edu/harvard/iq/dataverse/EditDatafilesPage.java b/src/main/java/edu/harvard/iq/dataverse/EditDatafilesPage.java index 1c033b37872..74c4e782d56 100644 --- a/src/main/java/edu/harvard/iq/dataverse/EditDatafilesPage.java +++ b/src/main/java/edu/harvard/iq/dataverse/EditDatafilesPage.java @@ -28,6 +28,7 @@ import edu.harvard.iq.dataverse.engine.command.impl.RequestRsyncScriptCommand; import edu.harvard.iq.dataverse.engine.command.impl.UpdateDatasetThumbnailCommand; import edu.harvard.iq.dataverse.engine.command.impl.UpdateDatasetVersionCommand; +import edu.harvard.iq.dataverse.engine.command.impl.CreateNewDataFilesCommand; import edu.harvard.iq.dataverse.ingest.IngestRequest; import edu.harvard.iq.dataverse.ingest.IngestServiceBean; import edu.harvard.iq.dataverse.ingest.IngestUtil; @@ -187,7 +188,13 @@ public enum Referrer { // Used to store results of permissions checks private final Map datasetPermissionMap = new HashMap<>(); // { Permission human_name : Boolean } + // Size limit of an individual file: (set for the storage volume used) private Long maxFileUploadSizeInBytes = null; + // Total amount of data that the user should be allowed to upload. + // Will be calculated in real time based on various level quotas - + // for this user and/or this collection/dataset, etc. We should + // assume that it may change during the user session. + private Long maxTotalUploadSizeInBytes = null; private Long maxIngestSizeInBytes = null; // CSV: 4.8 MB, DTA: 976.6 KB, XLSX: 5.7 MB, etc. 
private String humanPerFormatTabularLimits = null; @@ -336,6 +343,14 @@ public Long getMaxFileUploadSizeInBytes() { public String getHumanMaxFileUploadSizeInBytes() { return FileSizeChecker.bytesToHumanReadable(this.maxFileUploadSizeInBytes); } + + public Long getMaxTotalUploadSizeInBytes() { + return maxTotalUploadSizeInBytes; + } + + public String getHumanMaxTotalUploadSizeInBytes() { + return FileSizeChecker.bytesToHumanReadable(maxTotalUploadSizeInBytes); + } public boolean isUnlimitedUploadFileSize() { @@ -563,7 +578,6 @@ public String init() { this.maxIngestSizeInBytes = systemConfig.getTabularIngestSizeLimit(); this.humanPerFormatTabularLimits = populateHumanPerFormatTabularLimits(); this.multipleUploadFilesLimit = systemConfig.getMultipleUploadFilesLimit(); - this.maxFileUploadSizeInBytes = systemConfig.getMaxFileUploadSizeForStore(dataset.getEffectiveStorageDriverId()); hasValidTermsOfAccess = isHasValidTermsOfAccess(); if (!hasValidTermsOfAccess) { @@ -2024,7 +2038,13 @@ public void handleFileUpload(FileUploadEvent event) throws IOException { // Note: A single uploaded file may produce multiple datafiles - // for example, multiple files can be extracted from an uncompressed // zip file. - CreateDataFileResult createDataFilesResult = FileUtil.createDataFiles(workingVersion, uFile.getInputStream(), uFile.getFileName(), uFile.getContentType(), null, null, systemConfig); + ///CreateDataFileResult createDataFilesResult = FileUtil.createDataFiles(workingVersion, uFile.getInputStream(), uFile.getFileName(), uFile.getContentType(), null, null, systemConfig); + + Command cmd; + cmd = new CreateNewDataFilesCommand(dvRequestService.getDataverseRequest(), workingVersion, uFile.getInputStream(), uFile.getFileName(), uFile.getContentType(), null, null); + CreateDataFileResult createDataFilesResult = commandEngine.submit(cmd); + + dFileList = createDataFilesResult.getDataFiles(); String createDataFilesError = editDataFilesPageHelper.getHtmlErrorMessage(createDataFilesResult); if(createDataFilesError != null) { @@ -2033,8 +2053,14 @@ public void handleFileUpload(FileUploadEvent event) throws IOException { } } catch (IOException ioex) { + // shouldn't we try and communicate to the user what happened? logger.warning("Failed to process and/or save the file " + uFile.getFileName() + "; " + ioex.getMessage()); return; + } catch (CommandException cex) { + // shouldn't we try and communicate to the user what happened? 
+ errorMessages.add(cex.getMessage()); + uploadComponentId = event.getComponent().getClientId(); + return; } /*catch (FileExceedsMaxSizeException ex) { logger.warning("Failed to process and/or save the file " + uFile.getFileName() + "; " + ex.getMessage()); diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateNewDataFilesCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateNewDataFilesCommand.java new file mode 100644 index 00000000000..9f281f9446d --- /dev/null +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateNewDataFilesCommand.java @@ -0,0 +1,555 @@ +package edu.harvard.iq.dataverse.engine.command.impl; + +import edu.harvard.iq.dataverse.DataFile; +import edu.harvard.iq.dataverse.DatasetVersion; +import edu.harvard.iq.dataverse.authorization.Permission; +import edu.harvard.iq.dataverse.datasetutility.FileExceedsMaxSizeException; +import edu.harvard.iq.dataverse.datasetutility.FileSizeChecker; +import static edu.harvard.iq.dataverse.datasetutility.FileSizeChecker.bytesToHumanReadable; +import edu.harvard.iq.dataverse.engine.command.AbstractCommand; +import edu.harvard.iq.dataverse.engine.command.CommandContext; +import edu.harvard.iq.dataverse.engine.command.DataverseRequest; +import edu.harvard.iq.dataverse.engine.command.RequiredPermissions; +import edu.harvard.iq.dataverse.engine.command.exception.CommandException; +import edu.harvard.iq.dataverse.engine.command.exception.CommandExecutionException; +import edu.harvard.iq.dataverse.ingest.IngestServiceShapefileHelper; +import edu.harvard.iq.dataverse.DataFileServiceBean.UserStorageQuota; +import edu.harvard.iq.dataverse.util.file.FileExceedsStorageQuotaException; +import edu.harvard.iq.dataverse.util.BundleUtil; +import edu.harvard.iq.dataverse.util.FileUtil; +import static edu.harvard.iq.dataverse.util.FileUtil.MIME_TYPE_UNDETERMINED_DEFAULT; +import static edu.harvard.iq.dataverse.util.FileUtil.createIngestFailureReport; +import static edu.harvard.iq.dataverse.util.FileUtil.determineFileType; +import static edu.harvard.iq.dataverse.util.FileUtil.determineFileTypeByNameAndExtension; +import static edu.harvard.iq.dataverse.util.FileUtil.getFilesTempDirectory; +import static edu.harvard.iq.dataverse.util.FileUtil.saveInputStreamInTempFile; +import static edu.harvard.iq.dataverse.util.FileUtil.useRecognizedType; +import edu.harvard.iq.dataverse.util.ShapefileHandler; +import edu.harvard.iq.dataverse.util.StringUtil; +import edu.harvard.iq.dataverse.util.file.BagItFileHandler; +import edu.harvard.iq.dataverse.util.file.BagItFileHandlerFactory; +import edu.harvard.iq.dataverse.util.file.CreateDataFileResult; +import java.io.File; +import java.io.FileInputStream; +import java.io.IOException; +import java.io.InputStream; +import java.nio.charset.Charset; +import java.nio.file.Files; +import java.nio.file.Path; +import java.nio.file.Paths; +import java.nio.file.StandardCopyOption; +import java.text.MessageFormat; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; +import java.util.Optional; +import java.util.logging.Logger; +import java.util.zip.GZIPInputStream; +import java.util.zip.ZipEntry; +import java.util.zip.ZipInputStream; +import javax.enterprise.inject.spi.CDI; +import org.apache.commons.io.FileUtils; +import org.apache.commons.lang3.StringUtils; + +/** + * + * @author landreev + */ +@RequiredPermissions( Permission.EditDataset ) +public class CreateNewDataFilesCommand extends AbstractCommand { + private static final Logger logger = 
Logger.getLogger(CreateNewDataFilesCommand.class.getCanonicalName()); + + private final DatasetVersion version; + private final InputStream inputStream; + private final String fileName; + private final String suppliedContentType; + private final String newStorageIdentifier; + private final String newCheckSum; + private DataFile.ChecksumType newCheckSumType; + + public CreateNewDataFilesCommand(DataverseRequest aRequest, DatasetVersion version, InputStream inputStream, String fileName, String suppliedContentType, String newStorageIdentifier, String newCheckSum) { + this(aRequest, version, inputStream, fileName, suppliedContentType, newStorageIdentifier, newCheckSum, null); + } + + public CreateNewDataFilesCommand(DataverseRequest aRequest, DatasetVersion version, InputStream inputStream, String fileName, String suppliedContentType, String newStorageIdentifier, String newCheckSum, DataFile.ChecksumType newCheckSumType) { + super(aRequest, version.getDataset()); + + this.version = version; + this.inputStream = inputStream; + this.fileName = fileName; + this.suppliedContentType = suppliedContentType; + this.newStorageIdentifier = newStorageIdentifier; + this.newCheckSum = newCheckSum; + this.newCheckSumType = newCheckSumType; + } + + @Override + public CreateDataFileResult execute(CommandContext ctxt) throws CommandException { + List datafiles = new ArrayList<>(); + + //When there is no checksum/checksumtype being sent (normal upload, needs to be calculated), set the type to the current default + if(newCheckSumType == null) { + newCheckSumType = ctxt.systemConfig().getFileFixityChecksumAlgorithm(); + } + + String warningMessage = null; + + // save the file, in the temporary location for now: + Path tempFile = null; + + Long fileSizeLimit = ctxt.systemConfig().getMaxFileUploadSizeForStore(version.getDataset().getEffectiveStorageDriverId()); + Long storageQuotaLimit = null; + + if (ctxt.systemConfig().isStorageQuotasEnforced()) { + //storageQuotaLimit = ctxt.files().getClass()...; + UserStorageQuota quota = ctxt.files().getUserStorageQuota(super.getRequest().getAuthenticatedUser(), this.version.getDataset()); + if (quota != null) { + storageQuotaLimit = quota.getRemainingQuotaInBytes(); + } + } + String finalType = null; + + if (newStorageIdentifier == null) { + if (getFilesTempDirectory() != null) { + try { + tempFile = Files.createTempFile(Paths.get(getFilesTempDirectory()), "tmp", "upload"); + // "temporary" location is the key here; this is why we are not using + // the DataStore framework for this - the assumption is that + // temp files will always be stored on the local filesystem. + // -- L.A. Jul. 
2014 + logger.fine("Will attempt to save the file as: " + tempFile.toString()); + Files.copy(inputStream, tempFile, StandardCopyOption.REPLACE_EXISTING); + } catch (IOException ioex) { + throw new CommandExecutionException("Failed to save the upload as a temp file (temp disk space?)", ioex, this); + } + + // A file size check, before we do anything else: + // (note that "no size limit set" = "unlimited") + // (also note, that if this is a zip file, we'll be checking + // the size limit for each of the individual unpacked files) + Long fileSize = tempFile.toFile().length(); + if (fileSizeLimit != null && fileSize > fileSizeLimit) { + try { + tempFile.toFile().delete(); + } catch (Exception ex) { + // ignore - but log a warning + logger.warning("Could not remove temp file " + tempFile.getFileName()); + } + throw new CommandExecutionException(MessageFormat.format(BundleUtil.getStringFromBundle("file.addreplace.error.file_exceeds_limit"), bytesToHumanReadable(fileSize), bytesToHumanReadable(fileSizeLimit)), this); + } + + } else { + throw new CommandExecutionException("Temp directory is not configured.", this); + } + + logger.fine("mime type supplied: " + suppliedContentType); + + // Let's try our own utilities (Jhove, etc.) to determine the file type + // of the uploaded file. (We may already have a mime type supplied for this + // file - maybe the type that the browser recognized on upload; or, if + // it's a harvest, maybe the remote server has already given us the type + // for this file... with our own type utility we may or may not do better + // than the type supplied: + // -- L.A. + String recognizedType = null; + + try { + recognizedType = determineFileType(tempFile.toFile(), fileName); + logger.fine("File utility recognized the file as " + recognizedType); + if (recognizedType != null && !recognizedType.equals("")) { + if (useRecognizedType(suppliedContentType, recognizedType)) { + finalType = recognizedType; + } + } + + } catch (Exception ex) { + logger.warning("Failed to run the file utility mime type check on file " + fileName); + } + + if (finalType == null) { + finalType = (suppliedContentType == null || suppliedContentType.equals("")) + ? MIME_TYPE_UNDETERMINED_DEFAULT + : suppliedContentType; + } + + // A few special cases: + // if this is a gzipped FITS file, we'll uncompress it, and ingest it as + // a regular FITS file: + if (finalType.equals("application/fits-gzipped")) { + + InputStream uncompressedIn = null; + String finalFileName = fileName; + // if the file name had the ".gz" extension, remove it, + // since we are going to uncompress it: + if (fileName != null && fileName.matches(".*\\.gz$")) { + finalFileName = fileName.replaceAll("\\.gz$", ""); + } + + DataFile datafile = null; + try { + uncompressedIn = new GZIPInputStream(new FileInputStream(tempFile.toFile())); + File unZippedTempFile = saveInputStreamInTempFile(uncompressedIn, fileSizeLimit, storageQuotaLimit); + datafile = FileUtil.createSingleDataFile(version, unZippedTempFile, finalFileName, MIME_TYPE_UNDETERMINED_DEFAULT, ctxt.systemConfig().getFileFixityChecksumAlgorithm()); + } catch (IOException | FileExceedsMaxSizeException | FileExceedsStorageQuotaException ioex) { + // it looks like we simply skip the file silently, if its uncompressed size + // exceeds the limit. we should probably report this in detail instead. 
+                datafile = null;
+            } finally {
+                if (uncompressedIn != null) {
+                    try {
+                        uncompressedIn.close();
+                    } catch (IOException e) {
+                    }
+                }
+            }
+
+            // If we were able to produce an uncompressed file, we'll use it
+            // to create and return a final DataFile; if not, we're not going
+            // to do anything - and then a new DataFile will be created further
+            // down, from the original, uncompressed file.
+            if (datafile != null) {
+                // remove the compressed temp file:
+                try {
+                    tempFile.toFile().delete();
+                } catch (SecurityException ex) {
+                    // (this is very non-fatal)
+                    logger.warning("Failed to delete temporary file " + tempFile.toString());
+                }
+
+                datafiles.add(datafile);
+                return CreateDataFileResult.success(fileName, finalType, datafiles);
+            }
+
+        // If it's a ZIP file, we are going to unpack it and create multiple
+        // DataFile objects from its contents:
+        } else if (finalType.equals("application/zip")) {
+
+            ZipInputStream unZippedIn = null;
+            ZipEntry zipEntry = null;
+
+            int fileNumberLimit = ctxt.systemConfig().getZipUploadFilesLimit();
+
+            try {
+                Charset charset = null;
+                /*
+                TODO: (?)
+                We may want to investigate somehow letting the user specify
+                the charset for the filenames in the zip file...
+                - otherwise, ZipInputStream bails out if it encounters a file
+                name that's not valid in the current charset (i.e., UTF-8, in
+                our case). It would be a bit trickier than what we're doing for
+                SPSS tabular ingests - with the lang. encoding pulldown menu -
+                because this encoding needs to be specified *before* we upload and
+                attempt to unzip the file.
+                    -- L.A. 4.0 beta12
+                logger.info("default charset is "+Charset.defaultCharset().name());
+                if (Charset.isSupported("US-ASCII")) {
+                    logger.info("charset US-ASCII is supported.");
+                    charset = Charset.forName("US-ASCII");
+                    if (charset != null) {
+                        logger.info("was able to obtain charset for US-ASCII");
+                    }
+
+                }
+                */
+
+                if (charset != null) {
+                    unZippedIn = new ZipInputStream(new FileInputStream(tempFile.toFile()), charset);
+                } else {
+                    unZippedIn = new ZipInputStream(new FileInputStream(tempFile.toFile()));
+                }
+
+                Long storageQuotaLimitForUnzippedFiles = storageQuotaLimit;
+                while (true) {
+                    try {
+                        zipEntry = unZippedIn.getNextEntry();
+                    } catch (IllegalArgumentException iaex) {
+                        // Note:
+                        // ZipInputStream documentation doesn't even mention that
+                        // getNextEntry() throws an IllegalArgumentException!
+                        // but that's what happens if the file name of the next
+                        // entry is not valid in the current charset.
+                        // -- L.A.
+                        warningMessage = "Failed to unpack Zip file. (Unknown Character Set used in a file name?)
Saving the file as is."; + logger.warning(warningMessage); + throw new IOException(); + } + + if (zipEntry == null) { + break; + } + // Note that some zip entries may be directories - we + // simply skip them: + + if (!zipEntry.isDirectory()) { + if (datafiles.size() > fileNumberLimit) { + logger.warning("Zip upload - too many files."); + warningMessage = "The number of files in the zip archive is over the limit (" + fileNumberLimit + + "); please upload a zip archive with fewer files, if you want them to be ingested " + + "as individual DataFiles."; + throw new IOException(); + } + + String fileEntryName = zipEntry.getName(); + logger.fine("ZipEntry, file: " + fileEntryName); + + if (fileEntryName != null && !fileEntryName.equals("")) { + + String shortName = fileEntryName.replaceFirst("^.*[\\/]", ""); + + // Check if it's a "fake" file - a zip archive entry + // created for a MacOS X filesystem element: (these + // start with "._") + if (!shortName.startsWith("._") && !shortName.startsWith(".DS_Store") && !"".equals(shortName)) { + // OK, this seems like an OK file entry - we'll try + // to read it and create a DataFile with it: + + File unZippedTempFile = saveInputStreamInTempFile(unZippedIn, fileSizeLimit, storageQuotaLimitForUnzippedFiles); + DataFile datafile = FileUtil.createSingleDataFile(version, + unZippedTempFile, + null, + shortName, + MIME_TYPE_UNDETERMINED_DEFAULT, + ctxt.systemConfig().getFileFixityChecksumAlgorithm(), null, false); + + storageQuotaLimitForUnzippedFiles = storageQuotaLimitForUnzippedFiles - datafile.getFilesize(); + + if (!fileEntryName.equals(shortName)) { + // If the filename looks like a hierarchical folder name (i.e., contains slashes and backslashes), + // we'll extract the directory name; then subject it to some "aggressive sanitizing" - strip all + // the leading, trailing and duplicate slashes; then replace all the characters that + // don't pass our validation rules. + String directoryName = fileEntryName.replaceFirst("[\\\\/][\\\\/]*[^\\\\/]*$", ""); + directoryName = StringUtil.sanitizeFileDirectory(directoryName, true); + // if (!"".equals(directoryName)) { + if (!StringUtil.isEmpty(directoryName)) { + logger.fine("setting the directory label to " + directoryName); + datafile.getFileMetadata().setDirectoryLabel(directoryName); + } + } + + if (datafile != null) { + // We have created this datafile with the mime type "unknown"; + // Now that we have it saved in a temporary location, + // let's try and determine its real type: + + String tempFileName = getFilesTempDirectory() + "/" + datafile.getStorageIdentifier(); + + try { + recognizedType = determineFileType(new File(tempFileName), shortName); + logger.fine("File utility recognized unzipped file as " + recognizedType); + if (recognizedType != null && !recognizedType.equals("")) { + datafile.setContentType(recognizedType); + } + } catch (Exception ex) { + logger.warning("Failed to run the file utility mime type check on file " + fileName); + } + + datafiles.add(datafile); + } + } + } + } + unZippedIn.closeEntry(); + + } + + } catch (IOException ioex) { + // just clear the datafiles list and let + // ingest default to creating a single DataFile out + // of the unzipped file. 
+ logger.warning("Unzipping failed; rolling back to saving the file as is."); + if (warningMessage == null) { + warningMessage = BundleUtil.getStringFromBundle("file.addreplace.warning.unzip.failed"); + } + + datafiles.clear(); + } catch (FileExceedsMaxSizeException femsx) { + logger.warning("One of the unzipped files exceeds the size limit; resorting to saving the file as is. " + femsx.getMessage()); + warningMessage = BundleUtil.getStringFromBundle("file.addreplace.warning.unzip.failed.size", Arrays.asList(FileSizeChecker.bytesToHumanReadable(fileSizeLimit))); + datafiles.clear(); + } catch (FileExceedsStorageQuotaException fesqx) { + logger.warning("One of the unzipped files exceeds the storage quota limit; resorting to saving the file as is. " + fesqx.getMessage()); + warningMessage = BundleUtil.getStringFromBundle("file.addreplace.warning.unzip.failed.quota", Arrays.asList(FileSizeChecker.bytesToHumanReadable(storageQuotaLimit))); + datafiles.clear(); + } finally { + if (unZippedIn != null) { + try { + unZippedIn.close(); + } catch (Exception zEx) { + } + } + } + if (datafiles.size() > 0) { + // remove the uploaded zip file: + try { + Files.delete(tempFile); + } catch (IOException ioex) { + // do nothing - it's just a temp file. + logger.warning("Could not remove temp file " + tempFile.getFileName().toString()); + } + // and return: + return CreateDataFileResult.success(fileName, finalType, datafiles); + } + + } else if (finalType.equalsIgnoreCase(ShapefileHandler.SHAPEFILE_FILE_TYPE)) { + // Shape files may have to be split into multiple files, + // one zip archive per each complete set of shape files: + + // File rezipFolder = new File(this.getFilesTempDirectory()); + File rezipFolder = FileUtil.getShapefileUnzipTempDirectory(); + + IngestServiceShapefileHelper shpIngestHelper; + shpIngestHelper = new IngestServiceShapefileHelper(tempFile.toFile(), rezipFolder); + + boolean didProcessWork = shpIngestHelper.processFile(); + if (!(didProcessWork)) { + logger.severe("Processing of zipped shapefile failed."); + return CreateDataFileResult.error(fileName, finalType); + } + + try { + Long storageQuotaLimitForRezippedFiles = storageQuotaLimit; + + for (File finalFile : shpIngestHelper.getFinalRezippedFiles()) { + FileInputStream finalFileInputStream = new FileInputStream(finalFile); + finalType = FileUtil.determineContentType(finalFile); + if (finalType == null) { + logger.warning("Content type is null; but should default to 'MIME_TYPE_UNDETERMINED_DEFAULT'"); + continue; + } + + File unZippedShapeTempFile = saveInputStreamInTempFile(finalFileInputStream, fileSizeLimit, storageQuotaLimitForRezippedFiles); + DataFile new_datafile = FileUtil.createSingleDataFile(version, unZippedShapeTempFile, finalFile.getName(), finalType, ctxt.systemConfig().getFileFixityChecksumAlgorithm()); + + String directoryName = null; + String absolutePathName = finalFile.getParent(); + if (absolutePathName != null) { + if (absolutePathName.length() > rezipFolder.toString().length()) { + // This file lives in a subfolder - we want to + // preserve it in the FileMetadata: + directoryName = absolutePathName.substring(rezipFolder.toString().length() + 1); + + if (!StringUtil.isEmpty(directoryName)) { + new_datafile.getFileMetadata().setDirectoryLabel(directoryName); + } + } + } + if (new_datafile != null) { + datafiles.add(new_datafile); + // todo: can this new_datafile be null? 
+                        storageQuotaLimitForRezippedFiles = storageQuotaLimitForRezippedFiles - new_datafile.getFilesize();
+                    } else {
+                        logger.severe("Could not add part of rezipped shapefile. new_datafile was null: " + finalFile.getName());
+                    }
+                    try {
+                        finalFileInputStream.close();
+                    } catch (IOException ioex) {
+                        // this one can be ignored
+                    }
+
+                }
+            } catch (FileExceedsMaxSizeException | FileExceedsStorageQuotaException femsx) {
+                logger.severe("One of the unzipped shape files exceeded the size limit, or the storage quota; giving up. " + femsx.getMessage());
+                datafiles.clear();
+                // (or should we throw an exception, instead of skipping it quietly?)
+            } catch (IOException ioex) {
+                throw new CommandExecutionException("Failed to process one of the components of the unpacked shape file", ioex, this);
+                // todo? - maybe try to provide a more detailed explanation, of which repackaged component, etc.?
+            }
+
+            // Delete the temp directory used for unzipping
+            // The try-catch is due to errors encountered when using NFS for storing files,
+            // cf. https://github.com/IQSS/dataverse/issues/5909
+            try {
+                FileUtils.deleteDirectory(rezipFolder);
+            } catch (IOException ioex) {
+                // do nothing - it's a temp folder.
+                logger.warning("Could not remove temp folder, error message : " + ioex.getMessage());
+            }
+
+            if (datafiles.size() > 0) {
+                // remove the uploaded zip file:
+                try {
+                    Files.delete(tempFile);
+                } catch (IOException ioex) {
+                    // ignore - it's just a temp file - but let's log a warning
+                    logger.warning("Could not remove temp file " + tempFile.getFileName().toString());
+                } catch (SecurityException se) {
+                    // same
+                    logger.warning("Unable to delete: " + tempFile.toString() + " due to Security Exception: "
+                            + se.getMessage());
+                }
+                return CreateDataFileResult.success(fileName, finalType, datafiles);
+            } else {
+                logger.severe("No files added from directory of rezipped shapefiles");
+            }
+            return CreateDataFileResult.error(fileName, finalType);
+
+        } else if (finalType.equalsIgnoreCase(BagItFileHandler.FILE_TYPE)) {
+
+            try {
+                Optional<BagItFileHandler> bagItFileHandler = CDI.current().select(BagItFileHandlerFactory.class).get().getBagItFileHandler();
+                if (bagItFileHandler.isPresent()) {
+                    CreateDataFileResult result = bagItFileHandler.get().handleBagItPackage(ctxt.systemConfig(), version, fileName, tempFile.toFile());
+                    return result;
+                }
+            } catch (IOException ioex) {
+                throw new CommandExecutionException("Failed to process uploaded BagIt file", ioex, this);
+            }
+        }
+        } else {
+            // Default to suppliedContentType if set or the overall undetermined default if a content type isn't supplied
+            finalType = StringUtils.isBlank(suppliedContentType) ? FileUtil.MIME_TYPE_UNDETERMINED_DEFAULT : suppliedContentType;
+            String type = determineFileTypeByNameAndExtension(fileName);
+            if (!StringUtils.isBlank(type)) {
+                //Use rules for deciding when to trust browser supplied type
+                if (useRecognizedType(finalType, type)) {
+                    finalType = type;
+                }
+                logger.fine("Supplied type: " + suppliedContentType + ", finalType: " + finalType);
+            }
+        }
+        // Finally, if none of the special cases above were applicable (or
+        // if we were unable to unpack an uploaded file, etc.), we'll just
+        // create and return a single DataFile:
+        File newFile = null;
+        if (tempFile != null) {
+            newFile = tempFile.toFile();
+        }
+
+        // We have already checked that this file does not exceed the individual size limit;
+        // but if we are processing it as is, as a single file, we need to check if
+        // its size does not go beyond the allocated storage quota (if specified):
+
+        long fileSize = newFile.length();
+
+        if (storageQuotaLimit != null && fileSize > storageQuotaLimit) {
+            try {
+                tempFile.toFile().delete();
+            } catch (Exception ex) {
+                // ignore - but log a warning
+                logger.warning("Could not remove temp file " + tempFile.getFileName());
+            }
+            throw new CommandExecutionException(MessageFormat.format(BundleUtil.getStringFromBundle("file.addreplace.error.quota_exceeded"), bytesToHumanReadable(fileSize), bytesToHumanReadable(storageQuotaLimit)), this);
+        }
+
+        DataFile datafile = FileUtil.createSingleDataFile(version, newFile, newStorageIdentifier, fileName, finalType, newCheckSumType, newCheckSum);
+        File f = null;
+        if (tempFile != null) {
+            f = tempFile.toFile();
+        }
+        if (datafile != null && ((f != null) || (newStorageIdentifier != null))) {
+
+            if (warningMessage != null) {
+                createIngestFailureReport(datafile, warningMessage);
+                datafile.SetIngestProblem();
+            }
+            datafiles.add(datafile);
+
+            return CreateDataFileResult.success(fileName, finalType, datafiles);
+        }
+
+        return CreateDataFileResult.error(fileName, finalType);
+    } // end execute
+}
diff --git a/src/main/java/edu/harvard/iq/dataverse/settings/SettingsServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/settings/SettingsServiceBean.java
index d84e18d5931..7f44b4c6a0d 100644
--- a/src/main/java/edu/harvard/iq/dataverse/settings/SettingsServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/settings/SettingsServiceBean.java
@@ -576,7 +576,15 @@ Whether Harvesting (OAI) service is enabled
     /**
      * The URL for the DvWebLoader tool (see github.com/gdcc/dvwebloader for details)
      */
-    WebloaderUrl
+    WebloaderUrl,
+    /**
+     * Enforce storage quotas:
+     */
+    UseStorageQuotas,
+    /**
+     * Placeholder storage quota (defines the same quota setting for every user; used to test the concept of a quota.)
+ */ + StorageQuotaSizeInBytes ; diff --git a/src/main/java/edu/harvard/iq/dataverse/util/FileUtil.java b/src/main/java/edu/harvard/iq/dataverse/util/FileUtil.java index c600abfd409..ba24472b314 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/FileUtil.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/FileUtil.java @@ -105,6 +105,7 @@ import edu.harvard.iq.dataverse.dataaccess.DataAccessOption; import edu.harvard.iq.dataverse.dataaccess.StorageIO; +import edu.harvard.iq.dataverse.util.file.FileExceedsStorageQuotaException; import edu.harvard.iq.dataverse.datasetutility.FileSizeChecker; import java.util.Arrays; import org.apache.commons.io.IOUtils; @@ -411,7 +412,7 @@ public static String getUserFriendlyOriginalType(DataFile dataFile) { * Returns a content type string for a FileObject * */ - private static String determineContentType(File fileObject) { + public static String determineContentType(File fileObject) { if (fileObject==null){ return null; } @@ -902,7 +903,7 @@ public static CreateDataFileResult createDataFiles(DatasetVersion version, Input uncompressedIn = new GZIPInputStream(new FileInputStream(tempFile.toFile())); File unZippedTempFile = saveInputStreamInTempFile(uncompressedIn, fileSizeLimit); datafile = createSingleDataFile(version, unZippedTempFile, finalFileName, MIME_TYPE_UNDETERMINED_DEFAULT, systemConfig.getFileFixityChecksumAlgorithm()); - } catch (IOException | FileExceedsMaxSizeException ioex) { + } catch (IOException | FileExceedsMaxSizeException | FileExceedsStorageQuotaException ioex) { datafile = null; } finally { if (uncompressedIn != null) { @@ -1068,7 +1069,7 @@ public static CreateDataFileResult createDataFiles(DatasetVersion version, Input } datafiles.clear(); - } catch (FileExceedsMaxSizeException femsx) { + } catch (FileExceedsMaxSizeException | FileExceedsStorageQuotaException femsx) { logger.warning("One of the unzipped files exceeds the size limit; resorting to saving the file as is. " + femsx.getMessage()); warningMessage = BundleUtil.getStringFromBundle("file.addreplace.warning.unzip.failed.size", Arrays.asList(FileSizeChecker.bytesToHumanReadable(fileSizeLimit))); datafiles.clear(); @@ -1154,7 +1155,7 @@ public static CreateDataFileResult createDataFiles(DatasetVersion version, Input finalFileInputStream.close(); } - } catch (FileExceedsMaxSizeException femsx) { + } catch (FileExceedsMaxSizeException | FileExceedsStorageQuotaException femsx) { logger.severe("One of the unzipped shape files exceeded the size limit; giving up. 
" + femsx.getMessage()); datafiles.clear(); } @@ -1271,7 +1272,12 @@ public static boolean useRecognizedType(String suppliedContentType, String recog } public static File saveInputStreamInTempFile(InputStream inputStream, Long fileSizeLimit) - throws IOException, FileExceedsMaxSizeException { + throws IOException, FileExceedsMaxSizeException, FileExceedsStorageQuotaException { + return saveInputStreamInTempFile(inputStream, fileSizeLimit, null); + } + + public static File saveInputStreamInTempFile(InputStream inputStream, Long fileSizeLimit, Long storageQuotaLimit) + throws IOException, FileExceedsMaxSizeException, FileExceedsStorageQuotaException { Path tempFile = Files.createTempFile(Paths.get(getFilesTempDirectory()), "tmp", "upload"); if (inputStream != null && tempFile != null) { @@ -1285,6 +1291,11 @@ public static File saveInputStreamInTempFile(InputStream inputStream, Long fileS throw new FileExceedsMaxSizeException (MessageFormat.format(BundleUtil.getStringFromBundle("file.addreplace.error.file_exceeds_limit"), bytesToHumanReadable(fileSize), bytesToHumanReadable(fileSizeLimit))); } + if (storageQuotaLimit != null && fileSize > storageQuotaLimit) { + try {tempFile.toFile().delete();} catch (Exception ex) {} + throw new FileExceedsStorageQuotaException (MessageFormat.format(BundleUtil.getStringFromBundle("file.addreplace.error.file_exceeds_quota"), bytesToHumanReadable(fileSize), bytesToHumanReadable(storageQuotaLimit))); + } + return tempFile.toFile(); } throw new IOException("Failed to save uploaded file."); @@ -1325,7 +1336,6 @@ public static DataFile createSingleDataFile(DatasetVersion version, File tempFil datafile.setPermissionModificationTime(new Timestamp(new Date().getTime())); FileMetadata fmd = new FileMetadata(); - // TODO: add directoryLabel? 
fmd.setLabel(fileName); if (addToDataset) { @@ -1341,13 +1351,13 @@ public static DataFile createSingleDataFile(DatasetVersion version, File tempFil fmd.setDatasetVersion(version); version.getDataset().getFiles().add(datafile); } - if(storageIdentifier==null) { - generateStorageIdentifier(datafile); - if (!tempFile.renameTo(new File(getFilesTempDirectory() + "/" + datafile.getStorageIdentifier()))) { - return null; - } + if (storageIdentifier == null) { + generateStorageIdentifier(datafile); + if (!tempFile.renameTo(new File(getFilesTempDirectory() + "/" + datafile.getStorageIdentifier()))) { + return null; + } } else { - datafile.setStorageIdentifier(storageIdentifier); + datafile.setStorageIdentifier(storageIdentifier); } if ((checksum !=null)&&(!checksum.isEmpty())) { @@ -1372,7 +1382,7 @@ public static DataFile createSingleDataFile(DatasetVersion version, File tempFil Naming convention: getFilesTempDirectory() + "shp_" + "yyyy-MM-dd-hh-mm-ss-SSS" */ - private static File getShapefileUnzipTempDirectory(){ + public static File getShapefileUnzipTempDirectory(){ String tempDirectory = getFilesTempDirectory(); if (tempDirectory == null){ diff --git a/src/main/java/edu/harvard/iq/dataverse/util/SystemConfig.java b/src/main/java/edu/harvard/iq/dataverse/util/SystemConfig.java index c989add6e3d..ac4a3970379 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/SystemConfig.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/SystemConfig.java @@ -892,7 +892,7 @@ public String toString() { } } - + public boolean isPublicInstall(){ boolean saneDefault = false; return settingsService.isTrueForKey(SettingsServiceBean.Key.PublicInstall, saneDefault); @@ -1149,4 +1149,18 @@ public boolean isSignupDisabledForRemoteAuthProvider(String providerId) { return !ret; } + + public boolean isStorageQuotasEnforced() { + return settingsService.isTrueForKey(SettingsServiceBean.Key.UseStorageQuotas, false); + } + + /** + * This method should only be used for testing of the new storage quota + * mechanism, temporarily. 
(it uses the same value as the quota for + * *everybody* regardless of the circumstances, defined as a database + * setting) + */ + public Long getTestStorageQuotaLimit() { + return settingsService.getValueForKeyAsLong(SettingsServiceBean.Key.StorageQuotaSizeInBytes); + } } diff --git a/src/main/java/edu/harvard/iq/dataverse/util/bagit/data/FileUtilWrapper.java b/src/main/java/edu/harvard/iq/dataverse/util/bagit/data/FileUtilWrapper.java index 2bcac04076a..ecb34bdcfb5 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/bagit/data/FileUtilWrapper.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/bagit/data/FileUtilWrapper.java @@ -3,6 +3,7 @@ import edu.harvard.iq.dataverse.DataFile; import edu.harvard.iq.dataverse.DatasetVersion; import edu.harvard.iq.dataverse.datasetutility.FileExceedsMaxSizeException; +import edu.harvard.iq.dataverse.util.file.FileExceedsStorageQuotaException; import edu.harvard.iq.dataverse.util.FileUtil; import java.io.File; @@ -43,7 +44,11 @@ public void deleteFile(Path filePath) { } public File saveInputStreamInTempFile(InputStream inputStream, Long fileSizeLimit) throws IOException, FileExceedsMaxSizeException { - return FileUtil.saveInputStreamInTempFile(inputStream, fileSizeLimit); + try { + return FileUtil.saveInputStreamInTempFile(inputStream, fileSizeLimit); + } catch (FileExceedsStorageQuotaException fesqx) { + return null; + } } public String determineFileType(File file, String fileName) throws IOException { diff --git a/src/main/java/edu/harvard/iq/dataverse/util/file/FileExceedsStorageQuotaException.java b/src/main/java/edu/harvard/iq/dataverse/util/file/FileExceedsStorageQuotaException.java new file mode 100644 index 00000000000..29eeca254f7 --- /dev/null +++ b/src/main/java/edu/harvard/iq/dataverse/util/file/FileExceedsStorageQuotaException.java @@ -0,0 +1,22 @@ +/* + * To change this license header, choose License Headers in Project Properties. + * To change this template file, choose Tools | Templates + * and open the template in the editor. + */ +package edu.harvard.iq.dataverse.util.file; + +/** + * + * @author landreev + */ +public class FileExceedsStorageQuotaException extends Exception { + + public FileExceedsStorageQuotaException(String message) { + super(message); + } + + public FileExceedsStorageQuotaException(String message, Throwable cause) { + super(message, cause); + } + +} diff --git a/src/main/java/propertyFiles/Bundle.properties b/src/main/java/propertyFiles/Bundle.properties index 45807dc7cde..c1fd4ebaf10 100644 --- a/src/main/java/propertyFiles/Bundle.properties +++ b/src/main/java/propertyFiles/Bundle.properties @@ -2145,6 +2145,7 @@ file.message.replaceSuccess=The file has been replaced. file.addreplace.file_size_ok=File size is in range. file.addreplace.error.byte_abrev=B file.addreplace.error.file_exceeds_limit=This file size ({0}) exceeds the size limit of {1}. +file.addreplace.error.quota_exceeded=This file (size {0}) exceeds the remaining storage quota of {1}. file.addreplace.error.dataset_is_null=The dataset cannot be null. file.addreplace.error.dataset_id_is_null=The dataset ID cannot be null. 
file.addreplace.error.parsing=Error in parsing provided json diff --git a/src/main/webapp/editFilesFragment.xhtml b/src/main/webapp/editFilesFragment.xhtml index a4e635b8c14..99db5abd2dc 100644 --- a/src/main/webapp/editFilesFragment.xhtml +++ b/src/main/webapp/editFilesFragment.xhtml @@ -155,6 +155,7 @@ fileLimit="#{EditDatafilesPage.getMaxNumberOfFiles()}" invalidSizeMessage="#{bundle['file.edit.error.file_exceeds_limit']}" sequential="true" + previewWidth="-1" widgetVar="fileUploadWidget"> From 101580fe5f9c1896a2c57b46eae85b139d90cfe4 Mon Sep 17 00:00:00 2001 From: qqmyers Date: Thu, 23 Feb 2023 14:06:19 -0500 Subject: [PATCH 0050/1525] respond to review comments --- .../engine/command/DataverseRequest.java | 7 ++++++ .../command/impl/AbstractDatasetCommand.java | 24 +++++++------------ .../iq/dataverse/settings/JvmSettings.java | 2 +- .../META-INF/microprofile-config.properties | 3 --- 4 files changed, 16 insertions(+), 20 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/DataverseRequest.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/DataverseRequest.java index d0e5ce04002..0c42ea9cc56 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/DataverseRequest.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/DataverseRequest.java @@ -194,4 +194,11 @@ public HttpServletRequest getHttpServletRequest() { return httpServletRequest; } + public String getHttpServletRequestParameter(String paramName) { + if(httpServletRequest != null) { + return httpServletRequest.getParameter(paramName); + } + return null; + } + } diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/AbstractDatasetCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/AbstractDatasetCommand.java index 391f0d1d1a5..dd42a92fd42 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/AbstractDatasetCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/AbstractDatasetCommand.java @@ -224,25 +224,17 @@ protected Timestamp getTimestamp() { protected void checkSystemMetadataKeyIfNeeded(DatasetVersion newVersion, DatasetVersion persistedVersion) throws IllegalCommandException { Set changedMDBs = DatasetVersionDifference.getBlocksWithChanges(newVersion, persistedVersion); - changedMDBs.forEach(mdb -> { + for (MetadataBlock mdb : changedMDBs) { logger.fine(mdb.getName() + " has been changed"); - }); - - String smdbString = JvmSettings.METADATA_BLOCK_SYSTEM_METADATA_KEYS.lookupOptional().orElse(null); - if (smdbString != null) { - JsonObject systemMetadataBlocks = JsonUtil.getJsonObject(smdbString); - HttpServletRequest httpServletRequest = getRequest().getHttpServletRequest(); - if (httpServletRequest != null) { - String mdKey = httpServletRequest.getParameter(MetadataBlockServiceBean.SYSTEM_MD_KEY); - for (MetadataBlock mdb : changedMDBs) { - if (systemMetadataBlocks.containsKey(mdb.getName())) { - if (mdKey==null || !mdKey.equals(systemMetadataBlocks.getString(mdb.getName()))) { - throw new IllegalCommandException("Updating system metadata requires a key", this); - } - } + String smdbString = JvmSettings.METADATA_BLOCK_SYSTEM_METADATA_KEYS.lookupOptional(mdb.getName()) + .orElse(null); + if (smdbString != null) { + String mdKey = getRequest() + .getHttpServletRequestParameter(MetadataBlockServiceBean.SYSTEM_MD_KEY + "." 
+ mdb.getName()); + if (mdKey == null || !mdKey.equals(smdbString)) { + throw new IllegalCommandException("Updating system metadata in block " + mdb.getName() + " requires a valid key", this); } } } - } } diff --git a/src/main/java/edu/harvard/iq/dataverse/settings/JvmSettings.java b/src/main/java/edu/harvard/iq/dataverse/settings/JvmSettings.java index c9e72c53212..c4e072e02d0 100644 --- a/src/main/java/edu/harvard/iq/dataverse/settings/JvmSettings.java +++ b/src/main/java/edu/harvard/iq/dataverse/settings/JvmSettings.java @@ -70,7 +70,7 @@ public enum JvmSettings { // METADATA SETTINGS SCOPE_METADATA(PREFIX, "metadata"), - METADATA_BLOCK_SYSTEM_METADATA_KEYS(SCOPE_METADATA, "block-system-metadata-keys"), + METADATA_BLOCK_SYSTEM_METADATA_KEYS(SCOPE_METADATA, "block-system-metadata-keys.%1$s"), ; private static final String SCOPE_SEPARATOR = "."; diff --git a/src/main/resources/META-INF/microprofile-config.properties b/src/main/resources/META-INF/microprofile-config.properties index 84f42d321ff..58592775a98 100644 --- a/src/main/resources/META-INF/microprofile-config.properties +++ b/src/main/resources/META-INF/microprofile-config.properties @@ -40,6 +40,3 @@ dataverse.oai.server.maxsets=100 # the OAI repository name, as shown by the Identify verb, # can be customized via the setting below: #dataverse.oai.server.repositoryname= - -# METADATA SETTINGS -#dataverse.metadata.block-system-metadata-keys From 1f547852df023636128b11ea026bcc90e8edf78b Mon Sep 17 00:00:00 2001 From: qqmyers Date: Thu, 23 Feb 2023 16:00:23 -0500 Subject: [PATCH 0051/1525] allow headers or q params for system mdb keys --- .../iq/dataverse/MetadataBlockServiceBean.java | 2 -- .../dataverse/engine/command/DataverseRequest.java | 14 ++++++++++---- .../command/impl/AbstractDatasetCommand.java | 3 +-- 3 files changed, 11 insertions(+), 8 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/MetadataBlockServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/MetadataBlockServiceBean.java index d8a0e5fb56f..f34637dbfaf 100644 --- a/src/main/java/edu/harvard/iq/dataverse/MetadataBlockServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/MetadataBlockServiceBean.java @@ -18,8 +18,6 @@ public class MetadataBlockServiceBean { @PersistenceContext(unitName = "VDCNet-ejbPU") private EntityManager em; - public static final String SYSTEM_MD_KEY="mdkey"; - public MetadataBlock save(MetadataBlock mdb) { return em.merge(mdb); } diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/DataverseRequest.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/DataverseRequest.java index 0c42ea9cc56..170b380dbb0 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/DataverseRequest.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/DataverseRequest.java @@ -28,6 +28,8 @@ public class DataverseRequest { private final static String undefined = "0.0.0.0"; + private final static String MDKEY_PREFIX="mdkey."; + private static final Logger logger = Logger.getLogger(DataverseRequest.class.getName()); private static String headerToUse = null; @@ -194,11 +196,15 @@ public HttpServletRequest getHttpServletRequest() { return httpServletRequest; } - public String getHttpServletRequestParameter(String paramName) { - if(httpServletRequest != null) { - return httpServletRequest.getParameter(paramName); + public String getSystemMetadataBlockKeyFor(String blockName) { + String key = null; + if (httpServletRequest != null) { + key = httpServletRequest.getHeader(MDKEY_PREFIX + blockName); + if 
(key == null) {
+                key = httpServletRequest.getParameter(MDKEY_PREFIX + blockName);
+            }
         }
-        return null;
+        return key;
     }
 
 }
diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/AbstractDatasetCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/AbstractDatasetCommand.java
index dd42a92fd42..772a73d8799 100644
--- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/AbstractDatasetCommand.java
+++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/AbstractDatasetCommand.java
@@ -229,8 +229,7 @@ protected void checkSystemMetadataKeyIfNeeded(DatasetVersion newVersion, Dataset
             String smdbString = JvmSettings.METADATA_BLOCK_SYSTEM_METADATA_KEYS.lookupOptional(mdb.getName())
                     .orElse(null);
             if (smdbString != null) {
-                String mdKey = getRequest()
-                        .getHttpServletRequestParameter(MetadataBlockServiceBean.SYSTEM_MD_KEY + "." + mdb.getName());
+                String mdKey = getRequest().getSystemMetadataBlockKeyFor(mdb.getName());
                 if (mdKey == null || !mdKey.equals(smdbString)) {
                     throw new IllegalCommandException("Updating system metadata in block " + mdb.getName() + " requires a valid key", this);
                 }

From dbc81fc8041ce32fc69c5ab81f7a0c0ce881f9ea Mon Sep 17 00:00:00 2001
From: qqmyers
Date: Thu, 23 Feb 2023 16:00:47 -0500
Subject: [PATCH 0052/1525] docs and release notes

---
 .../9387-system_metadata_blocks.md            |  1 +
 .../source/admin/metadatacustomization.rst    | 40 +++++++++++++++++++
 .../dataset-semantic-metadata-api.rst         |  2 +
 3 files changed, 43 insertions(+)
 create mode 100644 doc/release-notes/9387-system_metadata_blocks.md

diff --git a/doc/release-notes/9387-system_metadata_blocks.md b/doc/release-notes/9387-system_metadata_blocks.md
new file mode 100644
index 00000000000..69110d141f8
--- /dev/null
+++ b/doc/release-notes/9387-system_metadata_blocks.md
@@ -0,0 +1 @@
+Dataverse supports requiring a secret key to add or edit metadata in specified 'system' metadata blocks. Without the key, the metadata in these blocks cannot be changed, and changes are currently only possible via API.

diff --git a/doc/sphinx-guides/source/admin/metadatacustomization.rst b/doc/sphinx-guides/source/admin/metadatacustomization.rst
index 9fb8626d4c4..ed48131966e 100644
--- a/doc/sphinx-guides/source/admin/metadatacustomization.rst
+++ b/doc/sphinx-guides/source/admin/metadatacustomization.rst
@@ -577,6 +577,46 @@ The scripts required can be hosted locally or retrieved dynamically from https:/
 
 Please note that in addition to the :ref:`:CVocConf` described above, an alternative is the :ref:`:ControlledVocabularyCustomJavaScript` setting.
 
+Protecting MetadataBlocks
+-------------------------
+
+Dataverse can be configured to only allow entries for a metadata block to be changed (created, edited, deleted) by entities that know a defined secret key.
+Metadata blocks protected by such a key are referred to as "System" metadata blocks.
+A primary use case for system metadata blocks is to handle metadata created by third-party tools interacting with Dataverse, where unintended changes to the metadata could cause a failure. Examples might include archiving systems or workflow engines.
+To protect an existing metadata block, one must set a key (recommended to be long and un-guessable) for that block:
+
+dataverse.metadata.block-system-metadata-keys.<blockName>=<key>
+
+This can be done using system properties (see :ref:`jvm-options`), environment variables or other MicroProfile Config mechanisms supported by the app server.
+`See Payara docs for supported sources `_.
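+
+For example, the key for the codemeta block could be set as a JVM option (the key value here is only an illustration; any of the config sources above can be used):
+
+.. code-block:: shell
+
+  ./asadmin create-jvm-options "\-Ddataverse.metadata.block-system-metadata-keys.codemeta=1234ChangeMeToSomethingLong"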
+
+For these secret keys, a password alias or the "dir config source" of Payara is recommended.
+
+  Alias creation example using the codemeta metadata block:
+
+  .. code-block:: shell
+
+    echo "AS_ADMIN_ALIASBLOCKKEY=1234ChangeMeToSomethingLong" > /tmp/key.txt
+    asadmin create-password-alias --passwordfile /tmp/key.txt dataverse.metadata.block-system-metadata-keys.codemeta
+    rm /tmp/key.txt
+
+When protected via a key, a metadata block will not be shown in the user interface when a dataset is being created or when metadata is being edited. Entries in such a system metadata block will be shown to users, consistent with Dataverse's design in which all metadata in published datasets is publicly visible.
+
+To add metadata to a system metadata block via API, one must include an additional key of the form
+
+mdkey.<blockName>=<key>
+
+as an HTTP header or query parameter for each system metadata block to any API call in which metadata values are changed in that block. Multiple keys are allowed if more than one system metadata block is being changed in a given API call.
+
+For example, following the :ref:`Add Dataset Metadata <add-semantic-metadata>` example from the :doc:`/developers/dataset-semantic-metadata-api`:
+
+.. code-block:: bash
+
+  curl -X PUT -H X-Dataverse-key:$API_TOKEN -H 'Content-Type: application/ld+json' -H 'mdkey.codemeta=1234' -d '{"title": "Submit menu test", "@context":{"title": "http://purl.org/dc/terms/title"}}' "$SERVER_URL/api/datasets/$DATASET_ID/metadata"
+
+  curl -X PUT -H X-Dataverse-key:$API_TOKEN -H 'Content-Type: application/ld+json' -d '{"title": "Submit menu test", "@context":{"title": "http://purl.org/dc/terms/title"}}' "$SERVER_URL/api/datasets/$DATASET_ID/metadata?mdkey.codemeta=1234"
+
+
 Tips from the Dataverse Community
 ---------------------------------
 
diff --git a/doc/sphinx-guides/source/developers/dataset-semantic-metadata-api.rst b/doc/sphinx-guides/source/developers/dataset-semantic-metadata-api.rst
index 52a6a283e9c..ded62288eb2 100644
--- a/doc/sphinx-guides/source/developers/dataset-semantic-metadata-api.rst
+++ b/doc/sphinx-guides/source/developers/dataset-semantic-metadata-api.rst
@@ -36,6 +36,8 @@ To get the json-ld formatted metadata for a Dataset, specify the Dataset ID (DAT
 
 You should expect a 200 ("OK") response and JSON-LD mirroring the OAI-ORE representation in the returned 'data' object.
 
+.. _add-semantic-metadata:
+
 Add Dataset Metadata
 --------------------
 

From 7dea42929332452731141677da767e670702d3ca Mon Sep 17 00:00:00 2001
From: qqmyers
Date: Thu, 23 Feb 2023 18:03:27 -0500
Subject: [PATCH 0053/1525] fix docs

---
 doc/sphinx-guides/source/admin/metadatacustomization.rst | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/doc/sphinx-guides/source/admin/metadatacustomization.rst b/doc/sphinx-guides/source/admin/metadatacustomization.rst
index ed48131966e..120c3c09b16 100644
--- a/doc/sphinx-guides/source/admin/metadatacustomization.rst
+++ b/doc/sphinx-guides/source/admin/metadatacustomization.rst
@@ -596,7 +596,7 @@ For these secret keys, a password alias or the "dir config source" of Payara is
 
   .. code-block:: shell
 
-    echo "AS_ADMIN_ALIASBLOCKKEY=1234ChangeMeToSomethingLong" > /tmp/key.txt
+    echo "AS_ADMIN_ALIASPASSWORD=1234ChangeMeToSomethingLong" > /tmp/key.txt
     asadmin create-password-alias --passwordfile /tmp/key.txt dataverse.metadata.block-system-metadata-keys.codemeta
     rm /tmp/key.txt
 

From 33c4102f1f17bb770acbdf82d833afc83a3f2bdc Mon Sep 17 00:00:00 2001
From: qqmyers
Date: Thu, 23 Feb 2023 18:03:54 -0500
Subject: [PATCH 0054/1525] fix microprofile key parameterization

---
 .../iq/dataverse/DatasetVersionUI.java        |  4 ++--
 .../harvard/iq/dataverse/SettingsWrapper.java | 20 ++++++++++++-------
 .../command/impl/AbstractDatasetCommand.java  |  6 ++++--
 .../iq/dataverse/settings/JvmSettings.java    |  3 ++-
 4 files changed, 21 insertions(+), 12 deletions(-)

diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetVersionUI.java b/src/main/java/edu/harvard/iq/dataverse/DatasetVersionUI.java
index f080f34c437..6e9f9c17f7a 100644
--- a/src/main/java/edu/harvard/iq/dataverse/DatasetVersionUI.java
+++ b/src/main/java/edu/harvard/iq/dataverse/DatasetVersionUI.java
@@ -399,7 +399,7 @@ public void setMetadataValueBlocks(DatasetVersion datasetVersion) {
         metadataBlocksForView.clear();
         metadataBlocksForEdit.clear();
 
-        JsonObject systemMDBlocks = settingsWrapper.getSystemMetadataBlocks();
+        List<MetadataBlock> systemMDBlocks = settingsWrapper.getSystemMetadataBlocks();
 
         Long dvIdForInputLevel = datasetVersion.getDataset().getOwner().getId();
 
@@ -443,7 +443,7 @@ public void setMetadataValueBlocks(DatasetVersion datasetVersion) {
             if (!datasetFieldsForView.isEmpty()) {
                 metadataBlocksForView.put(mdb, datasetFieldsForView);
             }
-            if (!datasetFieldsForEdit.isEmpty() && !systemMDBlocks.containsKey(mdb.getName())) {
+            if (!datasetFieldsForEdit.isEmpty() && !systemMDBlocks.contains(mdb)) {
                 metadataBlocksForEdit.put(mdb, datasetFieldsForEdit);
             }
         }
diff --git a/src/main/java/edu/harvard/iq/dataverse/SettingsWrapper.java b/src/main/java/edu/harvard/iq/dataverse/SettingsWrapper.java
index 5ee8dd53def..432ef11e0c8 100644
--- a/src/main/java/edu/harvard/iq/dataverse/SettingsWrapper.java
+++ b/src/main/java/edu/harvard/iq/dataverse/SettingsWrapper.java
@@ -61,6 +61,9 @@ public class SettingsWrapper implements java.io.Serializable {
     @EJB
     DatasetFieldServiceBean fieldService;
+
+    @EJB
+    MetadataBlockServiceBean mdbService;
 
     private Map settingsMap;
 
@@ -117,7 +120,7 @@ public class SettingsWrapper implements java.io.Serializable {
 
     private Boolean customLicenseAllowed = null;
 
-    private JsonObject systemMetadataBlocks;
+    private List<MetadataBlock> systemMetadataBlocks;
 
     private Set alwaysMuted = null;
 
@@ -705,16 +708,19 @@ public boolean isCustomLicenseAllowed() {
         return customLicenseAllowed;
     }
 
-    public JsonObject getSystemMetadataBlocks() {
+    public List<MetadataBlock> getSystemMetadataBlocks() {
+
         if (systemMetadataBlocks == null) {
-            String smdbString = JvmSettings.METADATA_BLOCK_SYSTEM_METADATA_KEYS.lookupOptional().orElse(null);
+            systemMetadataBlocks = new ArrayList<MetadataBlock>();
+        }
+        List<MetadataBlock> blocks = mdbService.listMetadataBlocks();
+        for (MetadataBlock mdb : blocks) {
+            String smdbString = JvmSettings.MDB_SYSTEM_KEY_FOR.lookupOptional(mdb.getName()).orElse(null);
             if (smdbString != null) {
-                systemMetadataBlocks = JsonUtil.getJsonObject(smdbString);
-            }
-            if (systemMetadataBlocks == null) {
-                systemMetadataBlocks = Json.createObjectBuilder().build();
+                systemMetadataBlocks.add(mdb);
             }
         }
+        return systemMetadataBlocks;
     }
 }
\ No newline at end of file
diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/AbstractDatasetCommand.java
b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/AbstractDatasetCommand.java index 772a73d8799..865993b19ad 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/AbstractDatasetCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/AbstractDatasetCommand.java @@ -226,11 +226,13 @@ protected void checkSystemMetadataKeyIfNeeded(DatasetVersion newVersion, Dataset Set changedMDBs = DatasetVersionDifference.getBlocksWithChanges(newVersion, persistedVersion); for (MetadataBlock mdb : changedMDBs) { logger.fine(mdb.getName() + " has been changed"); - String smdbString = JvmSettings.METADATA_BLOCK_SYSTEM_METADATA_KEYS.lookupOptional(mdb.getName()) + String smdbString = JvmSettings.MDB_SYSTEM_KEY_FOR.lookupOptional(mdb.getName()) .orElse(null); if (smdbString != null) { + logger.info("Found key: " + smdbString); String mdKey = getRequest().getSystemMetadataBlockKeyFor(mdb.getName()); - if (mdKey == null || !mdKey.equals(smdbString)) { + logger.info("Found supplied key: " + mdKey); + if (mdKey == null || !mdKey.equalsIgnoreCase(smdbString)) { throw new IllegalCommandException("Updating system metadata in block " + mdb.getName() + " requires a valid key", this); } } diff --git a/src/main/java/edu/harvard/iq/dataverse/settings/JvmSettings.java b/src/main/java/edu/harvard/iq/dataverse/settings/JvmSettings.java index c4e072e02d0..e58c8c4494c 100644 --- a/src/main/java/edu/harvard/iq/dataverse/settings/JvmSettings.java +++ b/src/main/java/edu/harvard/iq/dataverse/settings/JvmSettings.java @@ -70,7 +70,8 @@ public enum JvmSettings { // METADATA SETTINGS SCOPE_METADATA(PREFIX, "metadata"), - METADATA_BLOCK_SYSTEM_METADATA_KEYS(SCOPE_METADATA, "block-system-metadata-keys.%1$s"), + MDB_SYSTEM_METADATA_KEYS(SCOPE_METADATA, "block-system-metadata-keys"), + MDB_SYSTEM_KEY_FOR(MDB_SYSTEM_METADATA_KEYS), ; private static final String SCOPE_SEPARATOR = "."; From d5fd5e3e690f0b39d630b4774ec2807b2ec08750 Mon Sep 17 00:00:00 2001 From: Leonid Andreev Date: Thu, 23 Feb 2023 18:35:56 -0500 Subject: [PATCH 0055/1525] switched to the new Create Files command in the remaining places where the utility was used. #9361 --- .../iq/dataverse/EditDatafilesPage.java | 18 +++--- .../datadeposit/MediaResourceManagerImpl.java | 58 ++++++++++--------- .../datasetutility/AddReplaceFileHelper.java | 12 ++-- .../harvard/iq/dataverse/util/FileUtil.java | 6 +- 4 files changed, 54 insertions(+), 40 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/EditDatafilesPage.java b/src/main/java/edu/harvard/iq/dataverse/EditDatafilesPage.java index 74c4e782d56..928bf635ffa 100644 --- a/src/main/java/edu/harvard/iq/dataverse/EditDatafilesPage.java +++ b/src/main/java/edu/harvard/iq/dataverse/EditDatafilesPage.java @@ -1508,14 +1508,16 @@ public void handleDropBoxUpload(ActionEvent event) { // for example, multiple files can be extracted from an uncompressed // zip file. 
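+            // file creation now goes through the CreateNewDataFilesCommand
+            // engine command (see #9361), rather than through the static
+            // FileUtil utility: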
//datafiles = ingestService.createDataFiles(workingVersion, dropBoxStream, fileName, "application/octet-stream"); - CreateDataFileResult createDataFilesResult = FileUtil.createDataFiles(workingVersion, dropBoxStream, fileName, "application/octet-stream", null, null, systemConfig); + //CreateDataFileResult createDataFilesResult = FileUtil.createDataFiles(workingVersion, dropBoxStream, fileName, "application/octet-stream", null, null, systemConfig); + Command cmd = new CreateNewDataFilesCommand(dvRequestService.getDataverseRequest(), workingVersion, dropBoxStream, fileName, "application/octet-stream", null, null); + CreateDataFileResult createDataFilesResult = commandEngine.submit(cmd); datafiles = createDataFilesResult.getDataFiles(); Optional.ofNullable(editDataFilesPageHelper.getHtmlErrorMessage(createDataFilesResult)).ifPresent(errorMessage -> errorMessages.add(errorMessage)); - } catch (IOException ex) { + } catch (CommandException ex) { this.logger.log(Level.SEVERE, "Error during ingest of DropBox file {0} from link {1}", new Object[]{fileName, fileLink}); continue; - }/*catch (FileExceedsMaxSizeException ex){ + } /*catch (FileExceedsMaxSizeException ex){ this.logger.log(Level.SEVERE, "Error during ingest of DropBox file {0} from link {1}: {2}", new Object[]{fileName, fileLink, ex.getMessage()}); continue; }*/ finally { @@ -2040,8 +2042,7 @@ public void handleFileUpload(FileUploadEvent event) throws IOException { // zip file. ///CreateDataFileResult createDataFilesResult = FileUtil.createDataFiles(workingVersion, uFile.getInputStream(), uFile.getFileName(), uFile.getContentType(), null, null, systemConfig); - Command cmd; - cmd = new CreateNewDataFilesCommand(dvRequestService.getDataverseRequest(), workingVersion, uFile.getInputStream(), uFile.getFileName(), uFile.getContentType(), null, null); + Command cmd = new CreateNewDataFilesCommand(dvRequestService.getDataverseRequest(), workingVersion, uFile.getInputStream(), uFile.getFileName(), uFile.getContentType(), null, null); CreateDataFileResult createDataFilesResult = commandEngine.submit(cmd); @@ -2165,10 +2166,13 @@ public void handleExternalUpload() { // for example, multiple files can be extracted from an uncompressed // zip file. 
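+            // (note that this path, unlike the dropbox/HTTP upload paths above,
+            // has no input stream to pass in - the file is already in storage -
+            // so the storage identifier and the supplied checksum value/type
+            // are handed to the command instead)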
//datafiles = ingestService.createDataFiles(workingVersion, dropBoxStream, fileName, "application/octet-stream"); - CreateDataFileResult createDataFilesResult = FileUtil.createDataFiles(workingVersion, null, fileName, contentType, fullStorageIdentifier, checksumValue, checksumType, systemConfig); + ///CreateDataFileResult createDataFilesResult = FileUtil.createDataFiles(workingVersion, null, fileName, contentType, fullStorageIdentifier, checksumValue, checksumType, systemConfig); + + Command cmd = new CreateNewDataFilesCommand(dvRequestService.getDataverseRequest(), workingVersion, null, fileName, contentType, fullStorageIdentifier, checksumValue, checksumType); + CreateDataFileResult createDataFilesResult = commandEngine.submit(cmd); datafiles = createDataFilesResult.getDataFiles(); Optional.ofNullable(editDataFilesPageHelper.getHtmlErrorMessage(createDataFilesResult)).ifPresent(errorMessage -> errorMessages.add(errorMessage)); - } catch (IOException ex) { + } catch (CommandException ex) { logger.log(Level.SEVERE, "Error during ingest of file {0}", new Object[]{fileName}); } diff --git a/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/MediaResourceManagerImpl.java b/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/MediaResourceManagerImpl.java index 5491024c73c..f21a65bdf1e 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/MediaResourceManagerImpl.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/MediaResourceManagerImpl.java @@ -6,14 +6,17 @@ import edu.harvard.iq.dataverse.DatasetServiceBean; import edu.harvard.iq.dataverse.DatasetVersion; import edu.harvard.iq.dataverse.Dataverse; +import edu.harvard.iq.dataverse.DataverseRequestServiceBean; import edu.harvard.iq.dataverse.EjbDataverseEngine; import edu.harvard.iq.dataverse.PermissionServiceBean; import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser; import edu.harvard.iq.dataverse.dataaccess.StorageIO; import edu.harvard.iq.dataverse.datacapturemodule.DataCaptureModuleUtil; import edu.harvard.iq.dataverse.datasetutility.FileExceedsMaxSizeException; +import edu.harvard.iq.dataverse.engine.command.Command; import edu.harvard.iq.dataverse.engine.command.DataverseRequest; import edu.harvard.iq.dataverse.engine.command.exception.CommandException; +import edu.harvard.iq.dataverse.engine.command.impl.CreateNewDataFilesCommand; import edu.harvard.iq.dataverse.engine.command.impl.UpdateDatasetVersionCommand; import edu.harvard.iq.dataverse.ingest.IngestServiceBean; import edu.harvard.iq.dataverse.settings.SettingsServiceBean; @@ -69,6 +72,8 @@ public class MediaResourceManagerImpl implements MediaResourceManager { SwordAuth swordAuth; @Inject UrlManager urlManager; + @Inject + DataverseRequestServiceBean dvRequestService; private HttpServletRequest httpRequest; @@ -301,37 +306,38 @@ DepositReceipt replaceOrAddFiles(String uri, Deposit deposit, AuthCredentials au */ String guessContentTypeForMe = null; List dataFiles = new ArrayList<>(); + try { - try { - CreateDataFileResult createDataFilesResponse = FileUtil.createDataFiles(editVersion, deposit.getInputStream(), uploadedZipFilename, guessContentTypeForMe, null, null, systemConfig); - dataFiles = createDataFilesResponse.getDataFiles(); - } catch (EJBException ex) { - Throwable cause = ex.getCause(); - if (cause != null) { - if (cause instanceof IllegalArgumentException) { - /** - * @todo should be safe to remove this catch of - * EJBException and IllegalArgumentException once - * this ticket is resolved: - * - * 
IllegalArgumentException: MALFORMED when - * uploading certain zip files - * https://github.com/IQSS/dataverse/issues/1021 - */ - throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "Exception caught calling ingestService.createDataFiles. Problem with zip file, perhaps: " + cause); - } else { - throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "Exception caught calling ingestService.createDataFiles: " + cause); - } + //CreateDataFileResult createDataFilesResponse = FileUtil.createDataFiles(editVersion, deposit.getInputStream(), uploadedZipFilename, guessContentTypeForMe, null, null, systemConfig); + Command cmd = new CreateNewDataFilesCommand(dvRequestService.getDataverseRequest(), editVersion, deposit.getInputStream(), uploadedZipFilename, guessContentTypeForMe, null, null); + CreateDataFileResult createDataFilesResult = commandEngine.submit(cmd); + dataFiles = createDataFilesResult.getDataFiles(); + } catch (CommandException ex) { + Throwable cause = ex.getCause(); + if (cause != null) { + if (cause instanceof IllegalArgumentException) { + /** + * @todo should be safe to remove this catch of + * EJBException and IllegalArgumentException once this + * ticket is resolved: + * + * IllegalArgumentException: MALFORMED when uploading + * certain zip files + * https://github.com/IQSS/dataverse/issues/1021 + */ + throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "Unable to add file(s) to dataset. Problem with zip file, perhaps: " + cause); } else { - throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "Exception caught calling ingestService.createDataFiles. No cause: " + ex.getMessage()); + throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "Unable to add file(s) to dataset: " + cause); } - } /*TODO: L.A. 4.6! catch (FileExceedsMaxSizeException ex) { + } else { + throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "Unable to add file(s) to dataset: " + ex.getMessage()); + } + } + /*TODO: L.A. 4.6! 
catch (FileExceedsMaxSizeException ex) { throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "Exception caught calling ingestService.createDataFiles: " + ex.getMessage()); //Logger.getLogger(MediaResourceManagerImpl.class.getName()).log(Level.SEVERE, null, ex); - }*/ - } catch (IOException ex) { - throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "Unable to add file(s) to dataset: " + ex.getMessage()); - } + }*/ + if (!dataFiles.isEmpty()) { Set<ConstraintViolation> constraintViolations = editVersion.validate(); if (constraintViolations.size() > 0) { diff --git a/src/main/java/edu/harvard/iq/dataverse/datasetutility/AddReplaceFileHelper.java b/src/main/java/edu/harvard/iq/dataverse/datasetutility/AddReplaceFileHelper.java index 1d0ec0f19d9..e31f86093ed 100644 --- a/src/main/java/edu/harvard/iq/dataverse/datasetutility/AddReplaceFileHelper.java +++ b/src/main/java/edu/harvard/iq/dataverse/datasetutility/AddReplaceFileHelper.java @@ -63,6 +63,7 @@ import static edu.harvard.iq.dataverse.api.AbstractApiBean.STATUS_ERROR; import static edu.harvard.iq.dataverse.api.AbstractApiBean.STATUS_OK; +import edu.harvard.iq.dataverse.engine.command.impl.CreateNewDataFilesCommand; import static javax.ws.rs.core.Response.Status.BAD_REQUEST; /** @@ -1205,17 +1206,20 @@ private boolean step_030_createNewFilesViaIngest(){ clone = workingVersion.cloneDatasetVersion(); } try { - CreateDataFileResult result = FileUtil.createDataFiles(workingVersion, + /*CreateDataFileResult result = FileUtil.createDataFiles(workingVersion, this.newFileInputStream, this.newFileName, this.newFileContentType, this.newStorageIdentifier, this.newCheckSum, this.newCheckSumType, - this.systemConfig); - initialFileList = result.getDataFiles(); + this.systemConfig);*/ + + Command<CreateDataFileResult> cmd = new CreateNewDataFilesCommand(dvRequest, workingVersion, newFileInputStream, newFileName, newFileContentType, newStorageIdentifier, newCheckSum, newCheckSumType); + CreateDataFileResult createDataFilesResult = commandEngine.submit(cmd); + initialFileList = createDataFilesResult.getDataFiles(); - } catch (IOException ex) { + } catch (CommandException ex) { if (!Strings.isNullOrEmpty(ex.getMessage())) { this.addErrorSevere(getBundleErr("ingest_create_file_err") + " " + ex.getMessage()); } else { diff --git a/src/main/java/edu/harvard/iq/dataverse/util/FileUtil.java b/src/main/java/edu/harvard/iq/dataverse/util/FileUtil.java index ba24472b314..0c099242849 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/FileUtil.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/FileUtil.java @@ -805,7 +805,7 @@ public static String generateOriginalExtension(String fileType) { return ""; } - public static CreateDataFileResult createDataFiles(DatasetVersion version, InputStream inputStream, + /*public static CreateDataFileResult createDataFiles(DatasetVersion version, InputStream inputStream, String fileName, String suppliedContentType, String newStorageIdentifier, String newCheckSum, SystemConfig systemConfig) throws IOException { ChecksumType checkSumType = DataFile.ChecksumType.MD5; @@ -813,7 +813,7 @@ public static CreateDataFileResult createDataFiles(DatasetVersion version, Input checkSumType = systemConfig.getFileFixityChecksumAlgorithm(); } return createDataFiles(version, inputStream, fileName, suppliedContentType, newStorageIdentifier, newCheckSum, checkSumType, systemConfig); - } + }*/ public static CreateDataFileResult createDataFiles(DatasetVersion version, InputStream inputStream, String fileName, String suppliedContentType, String newStorageIdentifier, String newCheckSum,
ChecksumType newCheckSumType, SystemConfig systemConfig) throws IOException { List<DataFile> datafiles = new ArrayList<>(); @@ -1293,7 +1293,7 @@ public static File saveInputStreamInTempFile(InputStream inputStream, Long fileS if (storageQuotaLimit != null && fileSize > storageQuotaLimit) { try {tempFile.toFile().delete();} catch (Exception ex) {} - throw new FileExceedsStorageQuotaException (MessageFormat.format(BundleUtil.getStringFromBundle("file.addreplace.error.file_exceeds_quota"), bytesToHumanReadable(fileSize), bytesToHumanReadable(storageQuotaLimit))); + throw new FileExceedsStorageQuotaException (MessageFormat.format(BundleUtil.getStringFromBundle("file.addreplace.error.quota_exceeded"), bytesToHumanReadable(fileSize), bytesToHumanReadable(storageQuotaLimit))); } return tempFile.toFile(); From 6210c3435ff7df308a6491c5b9a0b0b23d758774 Mon Sep 17 00:00:00 2001 From: Leonid Andreev Date: Thu, 23 Feb 2023 18:43:23 -0500 Subject: [PATCH 0056/1525] removed the static utility methods that have been turned into a command. #9361 --- .../harvard/iq/dataverse/util/FileUtil.java | 430 ------------------ 1 file changed, 430 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/util/FileUtil.java b/src/main/java/edu/harvard/iq/dataverse/util/FileUtil.java index 0c099242849..014f44c5c33 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/FileUtil.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/FileUtil.java @@ -804,436 +804,6 @@ public static String generateOriginalExtension(String fileType) { } return ""; } - - /*public static CreateDataFileResult createDataFiles(DatasetVersion version, InputStream inputStream, - String fileName, String suppliedContentType, String newStorageIdentifier, String newCheckSum, - SystemConfig systemConfig) throws IOException { - ChecksumType checkSumType = DataFile.ChecksumType.MD5; - if (newStorageIdentifier == null) { - checkSumType = systemConfig.getFileFixityChecksumAlgorithm(); - } - return createDataFiles(version, inputStream, fileName, suppliedContentType, newStorageIdentifier, newCheckSum, checkSumType, systemConfig); - }*/ - - public static CreateDataFileResult createDataFiles(DatasetVersion version, InputStream inputStream, String fileName, String suppliedContentType, String newStorageIdentifier, String newCheckSum, ChecksumType newCheckSumType, SystemConfig systemConfig) throws IOException { - List<DataFile> datafiles = new ArrayList<>(); - - //When there is no checksum/checksumtype being sent (normal upload, needs to be calculated), set the type to the current default - if(newCheckSumType == null) { - newCheckSumType = systemConfig.getFileFixityChecksumAlgorithm(); - } - - String warningMessage = null; - - // save the file, in the temporary location for now: - Path tempFile = null; - - Long fileSizeLimit = systemConfig.getMaxFileUploadSizeForStore(version.getDataset().getEffectiveStorageDriverId()); - String finalType = null; - if (newStorageIdentifier == null) { - if (getFilesTempDirectory() != null) { - tempFile = Files.createTempFile(Paths.get(getFilesTempDirectory()), "tmp", "upload"); - // "temporary" location is the key here; this is why we are not using - // the DataStore framework for this - the assumption is that - // temp files will always be stored on the local filesystem. - // -- L.A. Jul.
2014 - logger.fine("Will attempt to save the file as: " + tempFile.toString()); - Files.copy(inputStream, tempFile, StandardCopyOption.REPLACE_EXISTING); - - // A file size check, before we do anything else: - // (note that "no size limit set" = "unlimited") - // (also note, that if this is a zip file, we'll be checking - // the size limit for each of the individual unpacked files) - Long fileSize = tempFile.toFile().length(); - if (fileSizeLimit != null && fileSize > fileSizeLimit) { - try { - tempFile.toFile().delete(); - } catch (Exception ex) { - } - throw new IOException(MessageFormat.format(BundleUtil.getStringFromBundle("file.addreplace.error.file_exceeds_limit"), bytesToHumanReadable(fileSize), bytesToHumanReadable(fileSizeLimit))); - } - - } else { - throw new IOException("Temp directory is not configured."); - } - logger.fine("mime type supplied: " + suppliedContentType); - // Let's try our own utilities (Jhove, etc.) to determine the file type - // of the uploaded file. (We may already have a mime type supplied for this - // file - maybe the type that the browser recognized on upload; or, if - // it's a harvest, maybe the remote server has already given us the type - // for this file... with our own type utility we may or may not do better - // than the type supplied: - // -- L.A. - String recognizedType = null; - - try { - recognizedType = determineFileType(tempFile.toFile(), fileName); - logger.fine("File utility recognized the file as " + recognizedType); - if (recognizedType != null && !recognizedType.equals("")) { - if (useRecognizedType(suppliedContentType, recognizedType)) { - finalType = recognizedType; - } - } - - } catch (Exception ex) { - logger.warning("Failed to run the file utility mime type check on file " + fileName); - } - - if (finalType == null) { - finalType = (suppliedContentType == null || suppliedContentType.equals("")) - ? MIME_TYPE_UNDETERMINED_DEFAULT - : suppliedContentType; - } - - // A few special cases: - // if this is a gzipped FITS file, we'll uncompress it, and ingest it as - // a regular FITS file: - if (finalType.equals("application/fits-gzipped")) { - - InputStream uncompressedIn = null; - String finalFileName = fileName; - // if the file name had the ".gz" extension, remove it, - // since we are going to uncompress it: - if (fileName != null && fileName.matches(".*\\.gz$")) { - finalFileName = fileName.replaceAll("\\.gz$", ""); - } - - DataFile datafile = null; - try { - uncompressedIn = new GZIPInputStream(new FileInputStream(tempFile.toFile())); - File unZippedTempFile = saveInputStreamInTempFile(uncompressedIn, fileSizeLimit); - datafile = createSingleDataFile(version, unZippedTempFile, finalFileName, MIME_TYPE_UNDETERMINED_DEFAULT, systemConfig.getFileFixityChecksumAlgorithm()); - } catch (IOException | FileExceedsMaxSizeException | FileExceedsStorageQuotaException ioex) { - datafile = null; - } finally { - if (uncompressedIn != null) { - try { - uncompressedIn.close(); - } catch (IOException e) { - } - } - } - - // If we were able to produce an uncompressed file, we'll use it - // to create and return a final DataFile; if not, we're not going - // to do anything - and then a new DataFile will be created further - // down, from the original, uncompressed file. 
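/* A minimal, self-contained sketch of the decompress-to-temp-file pattern the
 * gzipped-FITS branch above relies on; gzFile is an illustrative name, not a
 * variable of this method, and this is an aside rather than part of the patch:
 *
 *   File gzFile = new File("example.fits.gz");
 *   try (InputStream in = new GZIPInputStream(new FileInputStream(gzFile))) {
 *       Path out = Files.createTempFile("tmp", "unzipped");
 *       Files.copy(in, out, StandardCopyOption.REPLACE_EXISTING);
 *   } catch (IOException ioe) {
 *       // fall back to ingesting the original, still-compressed file, as the code above does
 *   }
 */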
- if (datafile != null) { - // remove the compressed temp file: - try { - tempFile.toFile().delete(); - } catch (SecurityException ex) { - // (this is very non-fatal) - logger.warning("Failed to delete temporary file " + tempFile.toString()); - } - - datafiles.add(datafile); - return CreateDataFileResult.success(fileName, finalType, datafiles); - } - - // If it's a ZIP file, we are going to unpack it and create multiple - // DataFile objects from its contents: - } else if (finalType.equals("application/zip")) { - - ZipInputStream unZippedIn = null; - ZipEntry zipEntry = null; - - int fileNumberLimit = systemConfig.getZipUploadFilesLimit(); - - try { - Charset charset = null; - /* - TODO: (?) - We may want to investigate somehow letting the user specify - the charset for the filenames in the zip file... - - otherwise, ZipInputStream bails out if it encounters a file - name that's not valid in the current charset (i.e., UTF-8, in - our case). It would be a bit trickier than what we're doing for - SPSS tabular ingests - with the lang. encoding pulldown menu - - because this encoding needs to be specified *before* we upload and - attempt to unzip the file. - -- L.A. 4.0 beta12 - logger.info("default charset is "+Charset.defaultCharset().name()); - if (Charset.isSupported("US-ASCII")) { - logger.info("charset US-ASCII is supported."); - charset = Charset.forName("US-ASCII"); - if (charset != null) { - logger.info("was able to obtain charset for US-ASCII"); - } - - } - */ - - if (charset != null) { - unZippedIn = new ZipInputStream(new FileInputStream(tempFile.toFile()), charset); - } else { - unZippedIn = new ZipInputStream(new FileInputStream(tempFile.toFile())); - } - - while (true) { - try { - zipEntry = unZippedIn.getNextEntry(); - } catch (IllegalArgumentException iaex) { - // Note: - // ZipInputStream documentation doesn't even mention that - // getNextEntry() throws an IllegalArgumentException! - // but that's what happens if the file name of the next - // entry is not valid in the current CharSet. - // -- L.A. - warningMessage = "Failed to unpack Zip file. (Unknown Character Set used in a file name?) 
Saving the file as is."; - logger.warning(warningMessage); - throw new IOException(); - } - - if (zipEntry == null) { - break; - } - // Note that some zip entries may be directories - we - // simply skip them: - - if (!zipEntry.isDirectory()) { - if (datafiles.size() > fileNumberLimit) { - logger.warning("Zip upload - too many files."); - warningMessage = "The number of files in the zip archive is over the limit (" + fileNumberLimit - + "); please upload a zip archive with fewer files, if you want them to be ingested " - + "as individual DataFiles."; - throw new IOException(); - } - - String fileEntryName = zipEntry.getName(); - logger.fine("ZipEntry, file: " + fileEntryName); - - if (fileEntryName != null && !fileEntryName.equals("")) { - - String shortName = fileEntryName.replaceFirst("^.*[\\/]", ""); - - // Check if it's a "fake" file - a zip archive entry - // created for a MacOS X filesystem element: (these - // start with "._") - if (!shortName.startsWith("._") && !shortName.startsWith(".DS_Store") && !"".equals(shortName)) { - // OK, this seems like an OK file entry - we'll try - // to read it and create a DataFile with it: - - File unZippedTempFile = saveInputStreamInTempFile(unZippedIn, fileSizeLimit); - DataFile datafile = createSingleDataFile(version, unZippedTempFile, null, shortName, - MIME_TYPE_UNDETERMINED_DEFAULT, - systemConfig.getFileFixityChecksumAlgorithm(), null, false); - - if (!fileEntryName.equals(shortName)) { - // If the filename looks like a hierarchical folder name (i.e., contains slashes and backslashes), - // we'll extract the directory name; then subject it to some "aggressive sanitizing" - strip all - // the leading, trailing and duplicate slashes; then replace all the characters that - // don't pass our validation rules. - String directoryName = fileEntryName.replaceFirst("[\\\\/][\\\\/]*[^\\\\/]*$", ""); - directoryName = StringUtil.sanitizeFileDirectory(directoryName, true); - // if (!"".equals(directoryName)) { - if (!StringUtil.isEmpty(directoryName)) { - logger.fine("setting the directory label to " + directoryName); - datafile.getFileMetadata().setDirectoryLabel(directoryName); - } - } - - if (datafile != null) { - // We have created this datafile with the mime type "unknown"; - // Now that we have it saved in a temporary location, - // let's try and determine its real type: - - String tempFileName = getFilesTempDirectory() + "/" + datafile.getStorageIdentifier(); - - try { - recognizedType = determineFileType(new File(tempFileName), shortName); - logger.fine("File utility recognized unzipped file as " + recognizedType); - if (recognizedType != null && !recognizedType.equals("")) { - datafile.setContentType(recognizedType); - } - } catch (Exception ex) { - logger.warning("Failed to run the file utility mime type check on file " + fileName); - } - - datafiles.add(datafile); - } - } - } - } - unZippedIn.closeEntry(); - - } - - } catch (IOException ioex) { - // just clear the datafiles list and let - // ingest default to creating a single DataFile out - // of the unzipped file. - logger.warning("Unzipping failed; rolling back to saving the file as is."); - if (warningMessage == null) { - warningMessage = BundleUtil.getStringFromBundle("file.addreplace.warning.unzip.failed"); - } - - datafiles.clear(); - } catch (FileExceedsMaxSizeException | FileExceedsStorageQuotaException femsx) { - logger.warning("One of the unzipped files exceeds the size limit; resorting to saving the file as is. 
" + femsx.getMessage()); - warningMessage = BundleUtil.getStringFromBundle("file.addreplace.warning.unzip.failed.size", Arrays.asList(FileSizeChecker.bytesToHumanReadable(fileSizeLimit))); - datafiles.clear(); - } finally { - if (unZippedIn != null) { - try { - unZippedIn.close(); - } catch (Exception zEx) { - } - } - } - if (datafiles.size() > 0) { - // link the data files to the dataset/version: - // (except we no longer want to do this! -- 4.6) - /*Iterator itf = datafiles.iterator(); - while (itf.hasNext()) { - DataFile datafile = itf.next(); - datafile.setOwner(version.getDataset()); - if (version.getFileMetadatas() == null) { - version.setFileMetadatas(new ArrayList()); - } - version.getFileMetadatas().add(datafile.getFileMetadata()); - datafile.getFileMetadata().setDatasetVersion(version); - - version.getDataset().getFiles().add(datafile); - } */ - // remove the uploaded zip file: - try { - Files.delete(tempFile); - } catch (IOException ioex) { - // do nothing - it's just a temp file. - logger.warning("Could not remove temp file " + tempFile.getFileName().toString()); - } - // and return: - return CreateDataFileResult.success(fileName, finalType, datafiles); - } - - } else if (finalType.equalsIgnoreCase(ShapefileHandler.SHAPEFILE_FILE_TYPE)) { - // Shape files may have to be split into multiple files, - // one zip archive per each complete set of shape files: - - // File rezipFolder = new File(this.getFilesTempDirectory()); - File rezipFolder = getShapefileUnzipTempDirectory(); - - IngestServiceShapefileHelper shpIngestHelper; - shpIngestHelper = new IngestServiceShapefileHelper(tempFile.toFile(), rezipFolder); - - boolean didProcessWork = shpIngestHelper.processFile(); - if (!(didProcessWork)) { - logger.severe("Processing of zipped shapefile failed."); - return CreateDataFileResult.error(fileName, finalType); - } - - try { - for (File finalFile : shpIngestHelper.getFinalRezippedFiles()) { - FileInputStream finalFileInputStream = new FileInputStream(finalFile); - finalType = determineContentType(finalFile); - if (finalType == null) { - logger.warning("Content type is null; but should default to 'MIME_TYPE_UNDETERMINED_DEFAULT'"); - continue; - } - - File unZippedShapeTempFile = saveInputStreamInTempFile(finalFileInputStream, fileSizeLimit); - DataFile new_datafile = createSingleDataFile(version, unZippedShapeTempFile, finalFile.getName(), finalType, systemConfig.getFileFixityChecksumAlgorithm()); - String directoryName = null; - String absolutePathName = finalFile.getParent(); - if (absolutePathName != null) { - if (absolutePathName.length() > rezipFolder.toString().length()) { - // This file lives in a subfolder - we want to - // preserve it in the FileMetadata: - directoryName = absolutePathName.substring(rezipFolder.toString().length() + 1); - - if (!StringUtil.isEmpty(directoryName)) { - new_datafile.getFileMetadata().setDirectoryLabel(directoryName); - } - } - } - if (new_datafile != null) { - datafiles.add(new_datafile); - } else { - logger.severe("Could not add part of rezipped shapefile. new_datafile was null: " + finalFile.getName()); - } - finalFileInputStream.close(); - - } - } catch (FileExceedsMaxSizeException | FileExceedsStorageQuotaException femsx) { - logger.severe("One of the unzipped shape files exceeded the size limit; giving up. " + femsx.getMessage()); - datafiles.clear(); - } - - // Delete the temp directory used for unzipping - // The try-catch is due to error encountered in using NFS for stocking file, - // cf. 
https://github.com/IQSS/dataverse/issues/5909 - try { - FileUtils.deleteDirectory(rezipFolder); - } catch (IOException ioex) { - // do nothing - it's a temp folder. - logger.warning("Could not remove temp folder, error message : " + ioex.getMessage()); - } - - if (datafiles.size() > 0) { - // remove the uploaded zip file: - try { - Files.delete(tempFile); - } catch (IOException ioex) { - // do nothing - it's just a temp file. - logger.warning("Could not remove temp file " + tempFile.getFileName().toString()); - } catch (SecurityException se) { - logger.warning("Unable to delete: " + tempFile.toString() + " due to Security Exception: " - + se.getMessage()); - } - return CreateDataFileResult.success(fileName, finalType, datafiles); - } else { - logger.severe("No files added from directory of rezipped shapefiles"); - } - return CreateDataFileResult.error(fileName, finalType); - - } else if (finalType.equalsIgnoreCase(BagItFileHandler.FILE_TYPE)) { - Optional<BagItFileHandler> bagItFileHandler = CDI.current().select(BagItFileHandlerFactory.class).get().getBagItFileHandler(); - if (bagItFileHandler.isPresent()) { - CreateDataFileResult result = bagItFileHandler.get().handleBagItPackage(systemConfig, version, fileName, tempFile.toFile()); - return result; - } - } - } else { - // Default to suppliedContentType if set or the overall undetermined default if a contenttype isn't supplied - finalType = StringUtils.isBlank(suppliedContentType) ? FileUtil.MIME_TYPE_UNDETERMINED_DEFAULT : suppliedContentType; - String type = determineFileTypeByNameAndExtension(fileName); - if (!StringUtils.isBlank(type)) { - //Use rules for deciding when to trust browser supplied type - if (useRecognizedType(finalType, type)) { - finalType = type; - } - logger.fine("Supplied type: " + suppliedContentType + ", finalType: " + finalType); - } - } - // Finally, if none of the special cases above were applicable (or - // if we were unable to unpack an uploaded file, etc.), we'll just - // create and return a single DataFile: - File newFile = null; - if (tempFile != null) { - newFile = tempFile.toFile(); - } - - - DataFile datafile = createSingleDataFile(version, newFile, newStorageIdentifier, fileName, finalType, newCheckSumType, newCheckSum); - File f = null; - if (tempFile != null) { - f = tempFile.toFile(); - } - if (datafile != null && ((f != null) || (newStorageIdentifier != null))) { - - if (warningMessage != null) { - createIngestFailureReport(datafile, warningMessage); - datafile.SetIngestProblem(); - } - datafiles.add(datafile); - - return CreateDataFileResult.success(fileName, finalType, datafiles); - } - - return CreateDataFileResult.error(fileName, finalType); - } // end createDataFiles - public static boolean useRecognizedType(String suppliedContentType, String recognizedType) { // is it any better than the type that was supplied to us, From 1a22b11c65353f7c2bd0677b2f4bb2e134aebcb5 Mon Sep 17 00:00:00 2001 From: Leonid Andreev Date: Fri, 24 Feb 2023 17:02:39 -0500 Subject: [PATCH 0057/1525] Added info messages about the remaining storage quota, if enforced, for the user on the upload page.
#9361 --- .../iq/dataverse/EditDatafilesPage.java | 20 +++++++++++++++---- src/main/java/propertyFiles/Bundle.properties | 3 ++- src/main/webapp/editFilesFragment.xhtml | 5 +++++ 3 files changed, 23 insertions(+), 5 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/EditDatafilesPage.java b/src/main/java/edu/harvard/iq/dataverse/EditDatafilesPage.java index 928bf635ffa..420642f2fa5 100644 --- a/src/main/java/edu/harvard/iq/dataverse/EditDatafilesPage.java +++ b/src/main/java/edu/harvard/iq/dataverse/EditDatafilesPage.java @@ -343,6 +343,11 @@ public Long getMaxFileUploadSizeInBytes() { public String getHumanMaxFileUploadSizeInBytes() { return FileSizeChecker.bytesToHumanReadable(this.maxFileUploadSizeInBytes); } + + public boolean isUnlimitedUploadFileSize() { + + return this.maxFileUploadSizeInBytes == null; + } public Long getMaxTotalUploadSizeInBytes() { return maxTotalUploadSizeInBytes; @@ -351,10 +356,9 @@ public Long getMaxTotalUploadSizeInBytes() { public String getHumanMaxTotalUploadSizeInBytes() { return FileSizeChecker.bytesToHumanReadable(maxTotalUploadSizeInBytes); } - - public boolean isUnlimitedUploadFileSize() { - - return this.maxFileUploadSizeInBytes == null; + + public boolean isStorageQuotaEnforced() { + return maxTotalUploadSizeInBytes != null; } public Long getMaxIngestSizeInBytes() { @@ -524,6 +528,11 @@ public String initCreateMode(String modeToken, DatasetVersion version, MutableBo selectedFiles = selectedFileMetadatasList; this.maxFileUploadSizeInBytes = systemConfig.getMaxFileUploadSizeForStore(dataset.getEffectiveStorageDriverId()); + if (systemConfig.isStorageQuotasEnforced()) { + this.maxTotalUploadSizeInBytes = datafileService.getUserStorageQuota((AuthenticatedUser) session.getUser(), dataset).getRemainingQuotaInBytes(); + } else { + this.maxTotalUploadSizeInBytes = null; + } this.maxIngestSizeInBytes = systemConfig.getTabularIngestSizeLimit(); this.humanPerFormatTabularLimits = populateHumanPerFormatTabularLimits(); this.multipleUploadFilesLimit = systemConfig.getMultipleUploadFilesLimit(); @@ -575,6 +584,9 @@ public String init() { clone = workingVersion.cloneDatasetVersion(); this.maxFileUploadSizeInBytes = systemConfig.getMaxFileUploadSizeForStore(dataset.getEffectiveStorageDriverId()); + if (systemConfig.isStorageQuotasEnforced()) { + this.maxTotalUploadSizeInBytes = datafileService.getUserStorageQuota((AuthenticatedUser) session.getUser(), dataset).getRemainingQuotaInBytes(); + } this.maxIngestSizeInBytes = systemConfig.getTabularIngestSizeLimit(); this.humanPerFormatTabularLimits = populateHumanPerFormatTabularLimits(); this.multipleUploadFilesLimit = systemConfig.getMultipleUploadFilesLimit(); diff --git a/src/main/java/propertyFiles/Bundle.properties b/src/main/java/propertyFiles/Bundle.properties index c1fd4ebaf10..dd9b398b709 100644 --- a/src/main/java/propertyFiles/Bundle.properties +++ b/src/main/java/propertyFiles/Bundle.properties @@ -1659,7 +1659,8 @@ file.select.tooltip=Select Files file.selectAllFiles=Select all {0} files in this dataset. file.dynamicCounter.filesPerPage=Files Per Page file.selectToAddBtn=Select Files to Add -file.selectToAdd.tipLimit=File upload limit is {0} per file. +file.selectToAdd.tipLimit=File upload limit is {0} per file. +file.selectToAdd.tipQuotaRemaining=Storage quota: {0} remaining. file.selectToAdd.tipMaxNumFiles=Maximum of {0} {0, choice, 0#files|1#file|2#files} per upload. file.selectToAdd.tipTabularLimit=Tabular file ingest is limited to {2}. 
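# A hedged sketch (not itself part of this patch's Bundle.properties) of how the new
# file.selectToAdd.tipQuotaRemaining key above is typically resolved on the Java side,
# mirroring the BundleUtil and FileSizeChecker calls used elsewhere in this series;
# remainingQuotaBytes is an illustrative variable name:
#   String quotaTip = BundleUtil.getStringFromBundle("file.selectToAdd.tipQuotaRemaining",
#           Arrays.asList(FileSizeChecker.bytesToHumanReadable(remainingQuotaBytes)));
#   // {0} is filled with the human-readable size, e.g. "Storage quota: 1.0 MB remaining."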
file.selectToAdd.tipPerFileTabularLimit=Ingest is limited to the following file sizes based on their format: {0}. diff --git a/src/main/webapp/editFilesFragment.xhtml b/src/main/webapp/editFilesFragment.xhtml index 99db5abd2dc..77f7aab2f76 100644 --- a/src/main/webapp/editFilesFragment.xhtml +++ b/src/main/webapp/editFilesFragment.xhtml @@ -80,6 +80,11 @@ rendered="#{!EditDatafilesPage.isUnlimitedUploadFileSize()}"> + + + + From 9bdcba600915d44117db9ae8ca046c1e32d07c3d Mon Sep 17 00:00:00 2001 From: lubitchv Date: Mon, 13 Mar 2023 11:54:39 -0400 Subject: [PATCH 0058/1525] Alternative Title --- conf/solr/8.11.1/schema.xml | 2 +- scripts/api/data/metadatablocks/citation.tsv | 2 +- .../dataverse/export/ddi/DdiExportUtil.java | 8 +++-- .../export/openaire/OpenAireExportUtil.java | 32 +++++++++++++++++-- .../dataverse/export/dataset-all-defaults.txt | 4 +-- .../dataset-create-new-all-ddi-fields.json | 4 +-- 6 files changed, 42 insertions(+), 10 deletions(-) diff --git a/conf/solr/8.11.1/schema.xml b/conf/solr/8.11.1/schema.xml index f11938621fc..5fa9c7f0616 100644 --- a/conf/solr/8.11.1/schema.xml +++ b/conf/solr/8.11.1/schema.xml @@ -250,7 +250,7 @@ - + diff --git a/scripts/api/data/metadatablocks/citation.tsv b/scripts/api/data/metadatablocks/citation.tsv index be32bb7134e..6b0f231c7b7 100644 --- a/scripts/api/data/metadatablocks/citation.tsv +++ b/scripts/api/data/metadatablocks/citation.tsv @@ -3,7 +3,7 @@ #datasetField name title description watermark fieldType displayOrder displayFormat advancedSearchField allowControlledVocabulary allowmultiples facetable displayoncreate required parent metadatablock_id termURI title Title The main title of the Dataset text 0 TRUE FALSE FALSE FALSE TRUE TRUE citation http://purl.org/dc/terms/title subtitle Subtitle A secondary title that amplifies or states certain limitations on the main title text 1 FALSE FALSE FALSE FALSE FALSE FALSE citation - alternativeTitle Alternative Title Either 1) a title commonly used to refer to the Dataset or 2) an abbreviation of the main title text 2 FALSE FALSE FALSE FALSE FALSE FALSE citation http://purl.org/dc/terms/alternative + alternativeTitle Alternative Title Either 1) a title commonly used to refer to the Dataset or 2) an abbreviation of the main title text 2 FALSE FALSE TRUE FALSE FALSE FALSE citation http://purl.org/dc/terms/alternative alternativeURL Alternative URL Another URL where one can view or access the data in the Dataset, e.g. a project or personal webpage https:// url 3 #VALUE FALSE FALSE FALSE FALSE FALSE FALSE citation https://schema.org/distribution otherId Other Identifier Another unique identifier for the Dataset (e.g. 
producer's or another repository's identifier) none 4 : FALSE FALSE TRUE FALSE FALSE FALSE citation otherIdAgency Agency The name of the agency that generated the other identifier text 5 #VALUE FALSE FALSE FALSE FALSE FALSE FALSE otherId citation diff --git a/src/main/java/edu/harvard/iq/dataverse/export/ddi/DdiExportUtil.java b/src/main/java/edu/harvard/iq/dataverse/export/ddi/DdiExportUtil.java index eb7632dd03c..eb53473d4d9 100644 --- a/src/main/java/edu/harvard/iq/dataverse/export/ddi/DdiExportUtil.java +++ b/src/main/java/edu/harvard/iq/dataverse/export/ddi/DdiExportUtil.java @@ -202,8 +202,12 @@ private static void createStdyDscr(XMLStreamWriter xmlw, DatasetDTO datasetDto) writeFullElement(xmlw, "titl", dto2Primitive(version, DatasetFieldConstant.title), datasetDto.getMetadataLanguage()); writeFullElement(xmlw, "subTitl", dto2Primitive(version, DatasetFieldConstant.subTitle)); - writeFullElement(xmlw, "altTitl", dto2Primitive(version, DatasetFieldConstant.alternativeTitle)); - + + FieldDTO altField = dto2FieldDTO( version, DatasetFieldConstant.alternativeTitle, "citation" ); + if (altField != null) { + writeMultipleElement(xmlw, "altTitl", altField, datasetDto.getMetadataLanguage()); + } + xmlw.writeStartElement("IDNo"); writeAttribute(xmlw, "agency", persistentAgency); diff --git a/src/main/java/edu/harvard/iq/dataverse/export/openaire/OpenAireExportUtil.java b/src/main/java/edu/harvard/iq/dataverse/export/openaire/OpenAireExportUtil.java index bea3858a60e..34cb7a4e138 100644 --- a/src/main/java/edu/harvard/iq/dataverse/export/openaire/OpenAireExportUtil.java +++ b/src/main/java/edu/harvard/iq/dataverse/export/openaire/OpenAireExportUtil.java @@ -7,6 +7,7 @@ import java.util.Map; import java.util.Set; import java.util.logging.Logger; +import java.util.List; import javax.json.JsonObject; import javax.xml.stream.XMLOutputFactory; @@ -368,8 +369,8 @@ public static void writeTitlesElement(XMLStreamWriter xmlw, DatasetVersionDTO da String subtitle = dto2Primitive(datasetVersionDTO, DatasetFieldConstant.subTitle); title_check = writeTitleElement(xmlw, "Subtitle", subtitle, title_check, language); - String alternativeTitle = dto2Primitive(datasetVersionDTO, DatasetFieldConstant.alternativeTitle); - title_check = writeTitleElement(xmlw, "AlternativeTitle", alternativeTitle, title_check, language); + title_check = writeMultipleTitleElement(xmlw, "AlternativeTitle", datasetVersionDTO, "citation", title_check, language); + writeEndTag(xmlw, title_check); } @@ -404,6 +405,33 @@ private static boolean writeTitleElement(XMLStreamWriter xmlw, String titleType, } return title_check; } + + private static boolean writeMultipleTitleElement(XMLStreamWriter xmlw, String titleType, DatasetVersionDTO datasetVersionDTO, String metadataBlockName, boolean title_check, String language) throws XMLStreamException { + MetadataBlockDTO block = datasetVersionDTO.getMetadataBlocks().get(metadataBlockName); + if (block != null) { + logger.info("Block is not empty"); + List<FieldDTO> fieldsBlock = block.getFields(); + if (fieldsBlock != null) { + for (FieldDTO fieldDTO : fieldsBlock) { + logger.info(titleType + " " + fieldDTO.getTypeName()); + if (titleType.toLowerCase().equals(fieldDTO.getTypeName().toLowerCase())) { + logger.info("Found Alt title"); + List<String> fields = fieldDTO.getMultiplePrimitive(); + for (String value : fields) { + if (!writeTitleElement(xmlw, titleType, value, title_check, language)) + title_check = false; + } + break; + } + } + } + } + + return title_check; + } + + + /** * 5, PublicationYear (M) */ diff --git
a/src/test/java/edu/harvard/iq/dataverse/export/dataset-all-defaults.txt b/src/test/java/edu/harvard/iq/dataverse/export/dataset-all-defaults.txt index a3f0dffc767..62f2cd37447 100644 --- a/src/test/java/edu/harvard/iq/dataverse/export/dataset-all-defaults.txt +++ b/src/test/java/edu/harvard/iq/dataverse/export/dataset-all-defaults.txt @@ -42,9 +42,9 @@ }, { "typeName": "alternativeTitle", - "multiple": false, + "multiple": true, "typeClass": "primitive", - "value": "Alternative Title" + "value": ["Alternative Title"] }, { "typeName": "alternativeURL", diff --git a/src/test/java/edu/harvard/iq/dataverse/export/ddi/dataset-create-new-all-ddi-fields.json b/src/test/java/edu/harvard/iq/dataverse/export/ddi/dataset-create-new-all-ddi-fields.json index 1b327c15496..96f058b1b02 100644 --- a/src/test/java/edu/harvard/iq/dataverse/export/ddi/dataset-create-new-all-ddi-fields.json +++ b/src/test/java/edu/harvard/iq/dataverse/export/ddi/dataset-create-new-all-ddi-fields.json @@ -51,9 +51,9 @@ }, { "typeName": "alternativeTitle", - "multiple": false, + "multiple": true, "typeClass": "primitive", - "value": "Alternative Title" + "value": ["Alternative Title"] }, { "typeName": "otherId", From 7b8281c7a0f98350533047dac1790603557e463b Mon Sep 17 00:00:00 2001 From: lubitchv Date: Mon, 13 Mar 2023 13:08:24 -0400 Subject: [PATCH 0059/1525] citation --- scripts/api/data/metadatablocks/citation.tsv | 509 +++++++++---------- 1 file changed, 254 insertions(+), 255 deletions(-) diff --git a/scripts/api/data/metadatablocks/citation.tsv b/scripts/api/data/metadatablocks/citation.tsv index 6b0f231c7b7..20d858ddb4b 100644 --- a/scripts/api/data/metadatablocks/citation.tsv +++ b/scripts/api/data/metadatablocks/citation.tsv @@ -3,8 +3,8 @@ #datasetField name title description watermark fieldType displayOrder displayFormat advancedSearchField allowControlledVocabulary allowmultiples facetable displayoncreate required parent metadatablock_id termURI title Title The main title of the Dataset text 0 TRUE FALSE FALSE FALSE TRUE TRUE citation http://purl.org/dc/terms/title subtitle Subtitle A secondary title that amplifies or states certain limitations on the main title text 1 FALSE FALSE FALSE FALSE FALSE FALSE citation - alternativeTitle Alternative Title Either 1) a title commonly used to refer to the Dataset or 2) an abbreviation of the main title text 2 FALSE FALSE TRUE FALSE FALSE FALSE citation http://purl.org/dc/terms/alternative - alternativeURL Alternative URL Another URL where one can view or access the data in the Dataset, e.g. a project or personal webpage https:// url 3 #VALUE FALSE FALSE FALSE FALSE FALSE FALSE citation https://schema.org/distribution + alternativeTitle Alternative Title Either 1) a title commonly used to refer to the Dataset or 2) an abbreviation of the main title text 2 FALSE FALSE TRUE FALSE FALSE FALSE citation http://purl.org/dc/terms/alternative + alternativeURL Alternative URL Another URL where one can view or access the data in the Dataset, e.g. a project or personal webpage https:// url 3 "#VALUE" FALSE FALSE FALSE FALSE FALSE FALSE citation https://schema.org/distribution otherId Other Identifier Another unique identifier for the Dataset (e.g. 
producer's or another repository's identifier) none 4 : FALSE FALSE TRUE FALSE FALSE FALSE citation otherIdAgency Agency The name of the agency that generated the other identifier text 5 #VALUE FALSE FALSE FALSE FALSE FALSE FALSE otherId citation otherIdValue Identifier Another identifier uniquely identifies the Dataset text 6 #VALUE FALSE FALSE FALSE FALSE FALSE FALSE otherId citation @@ -24,26 +24,26 @@ keyword Keyword A key term that describes an important aspect of the Dataset and information about any controlled vocabulary used none 20 FALSE FALSE TRUE FALSE TRUE FALSE citation keywordValue Term A key term that describes important aspects of the Dataset text 21 #VALUE TRUE FALSE FALSE TRUE TRUE FALSE keyword citation keywordVocabulary Controlled Vocabulary Name The controlled vocabulary used for the keyword term (e.g. LCSH, MeSH) text 22 (#VALUE) FALSE FALSE FALSE FALSE TRUE FALSE keyword citation - keywordVocabularyURI Controlled Vocabulary URL The URL where one can access information about the term's controlled vocabulary https:// url 23 #VALUE FALSE FALSE FALSE FALSE TRUE FALSE keyword citation + keywordVocabularyURI Controlled Vocabulary URL The URL where one can access information about the term's controlled vocabulary https:// url 23 "#VALUE" FALSE FALSE FALSE FALSE TRUE FALSE keyword citation topicClassification Topic Classification Indicates a broad, important topic or subject that the Dataset covers and information about any controlled vocabulary used none 24 FALSE FALSE TRUE FALSE FALSE FALSE citation topicClassValue Term A topic or subject term text 25 #VALUE TRUE FALSE FALSE TRUE FALSE FALSE topicClassification citation topicClassVocab Controlled Vocabulary Name The controlled vocabulary used for the keyword term (e.g. LCSH, MeSH) text 26 (#VALUE) FALSE FALSE FALSE FALSE FALSE FALSE topicClassification citation - topicClassVocabURI Controlled Vocabulary URL The URL where one can access information about the term's controlled vocabulary https:// url 27 #VALUE FALSE FALSE FALSE FALSE FALSE FALSE topicClassification citation + topicClassVocabURI Controlled Vocabulary URL The URL where one can access information about the term's controlled vocabulary https:// url 27 "#VALUE" FALSE FALSE FALSE FALSE FALSE FALSE topicClassification citation publication Related Publication The article or report that uses the data in the Dataset. The full list of related publications will be displayed on the metadata tab none 28 FALSE FALSE TRUE FALSE TRUE FALSE citation http://purl.org/dc/terms/isReferencedBy publicationCitation Citation The full bibliographic citation for the related publication textbox 29 #VALUE TRUE FALSE FALSE FALSE TRUE FALSE publication citation http://purl.org/dc/terms/bibliographicCitation publicationIDType Identifier Type The type of identifier that uniquely identifies a related publication text 30 #VALUE: TRUE TRUE FALSE FALSE TRUE FALSE publication citation http://purl.org/spar/datacite/ResourceIdentifierScheme publicationIDNumber Identifier The identifier for a related publication text 31 #VALUE TRUE FALSE FALSE FALSE TRUE FALSE publication citation http://purl.org/spar/datacite/ResourceIdentifier - publicationURL URL The URL form of the identifier entered in the Identifier field, e.g. the DOI URL if a DOI was entered in the Identifier field. Used to display what was entered in the ID Type and ID Number fields as a link. If what was entered in the Identifier field has no URL form, the URL of the publication webpage is used, e.g. 
a journal article webpage https:// url 32 #VALUE FALSE FALSE FALSE FALSE FALSE FALSE publication citation https://schema.org/distribution + publicationURL URL The URL form of the identifier entered in the Identifier field, e.g. the DOI URL if a DOI was entered in the Identifier field. Used to display what was entered in the ID Type and ID Number fields as a link. If what was entered in the Identifier field has no URL form, the URL of the publication webpage is used, e.g. a journal article webpage https:// url 32 "#VALUE" FALSE FALSE FALSE FALSE FALSE FALSE publication citation https://schema.org/distribution notesText Notes Additional information about the Dataset textbox 33 FALSE FALSE FALSE FALSE TRUE FALSE citation language Language A language that the Dataset's files is written in text 34 TRUE TRUE TRUE TRUE FALSE FALSE citation http://purl.org/dc/terms/language producer Producer The entity, such a person or organization, managing the finances or other administrative processes involved in the creation of the Dataset none 35 FALSE FALSE TRUE FALSE FALSE FALSE citation producerName Name The name of the entity, e.g. the person's name or the name of an organization 1) FamilyName, GivenName or 2) Organization text 36 #VALUE TRUE FALSE FALSE TRUE FALSE TRUE producer citation producerAffiliation Affiliation The name of the entity affiliated with the producer, e.g. an organization's name Organization XYZ text 37 (#VALUE) FALSE FALSE FALSE FALSE FALSE FALSE producer citation producerAbbreviation Abbreviated Name The producer's abbreviated name (e.g. IQSS, ICPSR) text 38 (#VALUE) FALSE FALSE FALSE FALSE FALSE FALSE producer citation - producerURL URL The URL of the producer's website https:// url 39 #VALUE FALSE FALSE FALSE FALSE FALSE FALSE producer citation - producerLogoURL Logo URL The URL of the producer's logo https:// url 40
    FALSE FALSE FALSE FALSE FALSE FALSE producer citation + producerURL URL The URL of the producer's website https:// url 39 "#VALUE" FALSE FALSE FALSE FALSE FALSE FALSE producer citation + producerLogoURL Logo URL The URL of the producer's logo https:// url 40 "
    " FALSE FALSE FALSE FALSE FALSE FALSE producer citation productionDate Production Date The date when the data were produced (not distributed, published, or archived) YYYY-MM-DD date 41 TRUE FALSE FALSE TRUE FALSE FALSE citation - productionPlace Production Location The location where the data and any related materials were produced or collected text 42 TRUE FALSE TRUE TRUE FALSE FALSE citation + productionPlace Production Location The location where the data and any related materials were produced or collected text 42 FALSE FALSE FALSE FALSE FALSE FALSE citation contributor Contributor The entity, such as a person or organization, responsible for collecting, managing, or otherwise contributing to the development of the Dataset none 43 : FALSE FALSE TRUE FALSE FALSE FALSE citation http://purl.org/dc/terms/contributor contributorType Type Indicates the type of contribution made to the dataset text 44 #VALUE TRUE TRUE FALSE TRUE FALSE FALSE contributor citation contributorName Name The name of the contributor, e.g. the person's name or the name of an organization 1) FamilyName, GivenName or 2) Organization text 45 #VALUE TRUE FALSE FALSE TRUE FALSE FALSE contributor citation @@ -54,8 +54,8 @@ distributorName Name The name of the entity, e.g. the person's name or the name of an organization 1) FamilyName, GivenName or 2) Organization text 50 #VALUE TRUE FALSE FALSE TRUE FALSE FALSE distributor citation distributorAffiliation Affiliation The name of the entity affiliated with the distributor, e.g. an organization's name Organization XYZ text 51 (#VALUE) FALSE FALSE FALSE FALSE FALSE FALSE distributor citation distributorAbbreviation Abbreviated Name The distributor's abbreviated name (e.g. IQSS, ICPSR) text 52 (#VALUE) FALSE FALSE FALSE FALSE FALSE FALSE distributor citation - distributorURL URL The URL of the distributor's webpage https:// url 53 #VALUE FALSE FALSE FALSE FALSE FALSE FALSE distributor citation - distributorLogoURL Logo URL The URL of the distributor's logo image, used to show the image on the Dataset's page https:// url 54
    FALSE FALSE FALSE FALSE FALSE FALSE distributor citation + distributorURL URL The URL of the distributor's webpage https:// url 53 "#VALUE" FALSE FALSE FALSE FALSE FALSE FALSE distributor citation + distributorLogoURL Logo URL The URL of the distributor's logo image, used to show the image on the Dataset's page https:// url 54 "
    " FALSE FALSE FALSE FALSE FALSE FALSE distributor citation distributionDate Distribution Date The date when the Dataset was made available for distribution/presentation YYYY-MM-DD date 55 TRUE FALSE FALSE TRUE FALSE FALSE citation depositor Depositor The entity, such as a person or organization, that deposited the Dataset in the repository 1) FamilyName, GivenName or 2) Organization text 56 FALSE FALSE FALSE FALSE FALSE FALSE citation dateOfDeposit Deposit Date The date when the Dataset was deposited into the repository YYYY-MM-DD date 57 FALSE FALSE FALSE TRUE FALSE FALSE citation http://purl.org/dc/terms/dateSubmitted @@ -79,248 +79,247 @@ originOfSources Origin of Historical Sources For historical sources, the origin and any rules followed in establishing them as sources textbox 75 FALSE FALSE FALSE FALSE FALSE FALSE citation characteristicOfSources Characteristic of Sources Characteristics not already noted elsewhere textbox 76 FALSE FALSE FALSE FALSE FALSE FALSE citation accessToSources Documentation and Access to Sources 1) Methods or procedures for accessing data sources and 2) any special permissions needed for access textbox 77 FALSE FALSE FALSE FALSE FALSE FALSE citation -#controlledVocabulary DatasetField Value identifier displayOrder - subject Agricultural Sciences D01 0 - subject Arts and Humanities D0 1 - subject Astronomy and Astrophysics D1 2 - subject Business and Management D2 3 - subject Chemistry D3 4 - subject Computer and Information Science D7 5 - subject Earth and Environmental Sciences D4 6 - subject Engineering D5 7 - subject Law D8 8 - subject Mathematical Sciences D9 9 - subject Medicine, Health and Life Sciences D6 10 - subject Physics D10 11 - subject Social Sciences D11 12 - subject Other D12 13 - publicationIDType ark 0 - publicationIDType arXiv 1 - publicationIDType bibcode 2 - publicationIDType cstr 3 - publicationIDType doi 4 - publicationIDType ean13 5 - publicationIDType eissn 6 - publicationIDType handle 7 - publicationIDType isbn 8 - publicationIDType issn 9 - publicationIDType istc 10 - publicationIDType lissn 11 - publicationIDType lsid 12 - publicationIDType pmid 13 - publicationIDType purl 14 - publicationIDType upc 15 - publicationIDType url 16 - publicationIDType urn 17 - publicationIDType DASH-NRS 18 - contributorType Data Collector 0 - contributorType Data Curator 1 - contributorType Data Manager 2 - contributorType Editor 3 - contributorType Funder 4 - contributorType Hosting Institution 5 - contributorType Project Leader 6 - contributorType Project Manager 7 - contributorType Project Member 8 - contributorType Related Person 9 - contributorType Researcher 10 - contributorType Research Group 11 - contributorType Rights Holder 12 - contributorType Sponsor 13 - contributorType Supervisor 14 - contributorType Work Package Leader 15 - contributorType Other 16 - authorIdentifierScheme ORCID 0 - authorIdentifierScheme ISNI 1 - authorIdentifierScheme LCNA 2 - authorIdentifierScheme VIAF 3 - authorIdentifierScheme GND 4 - authorIdentifierScheme DAI 5 - authorIdentifierScheme ResearcherID 6 - authorIdentifierScheme ScopusID 7 - language Abkhaz 0 - language Afar 1 aar aa - language Afrikaans 2 afr af - language Akan 3 aka ak - language Albanian 4 sqi alb sq - language Amharic 5 amh am - language Arabic 6 ara ar - language Aragonese 7 arg an - language Armenian 8 hye arm hy - language Assamese 9 asm as - language Avaric 10 ava av - language Avestan 11 ave ae - language Aymara 12 aym ay - language Azerbaijani 13 aze az - language Bambara 14 bam bm - 
language Bashkir 15 bak ba - language Basque 16 eus baq eu - language Belarusian 17 bel be - language Bengali, Bangla 18 ben bn - language Bihari 19 bih bh - language Bislama 20 bis bi - language Bosnian 21 bos bs - language Breton 22 bre br - language Bulgarian 23 bul bg - language Burmese 24 mya bur my - language Catalan,Valencian 25 cat ca - language Chamorro 26 cha ch - language Chechen 27 che ce - language Chichewa, Chewa, Nyanja 28 nya ny - language Chinese 29 zho chi zh - language Chuvash 30 chv cv - language Cornish 31 cor kw - language Corsican 32 cos co - language Cree 33 cre cr - language Croatian 34 hrv src hr - language Czech 35 ces cze cs - language Danish 36 dan da - language Divehi, Dhivehi, Maldivian 37 div dv - language Dutch 38 nld dut nl - language Dzongkha 39 dzo dz - language English 40 eng en - language Esperanto 41 epo eo - language Estonian 42 est et - language Ewe 43 ewe ee - language Faroese 44 fao fo - language Fijian 45 fij fj - language Finnish 46 fin fi - language French 47 fra fre fr - language Fula, Fulah, Pulaar, Pular 48 ful ff - language Galician 49 glg gl - language Georgian 50 kat geo ka - language German 51 deu ger de - language Greek (modern) 52 gre ell el - language Guaraní 53 grn gn - language Gujarati 54 guj gu - language Haitian, Haitian Creole 55 hat ht - language Hausa 56 hau ha - language Hebrew (modern) 57 heb he - language Herero 58 her hz - language Hindi 59 hin hi - language Hiri Motu 60 hmo ho - language Hungarian 61 hun hu - language Interlingua 62 ina ia - language Indonesian 63 ind id - language Interlingue 64 ile ie - language Irish 65 gle ga - language Igbo 66 ibo ig - language Inupiaq 67 ipk ik - language Ido 68 ido io - language Icelandic 69 isl ice is - language Italian 70 ita it - language Inuktitut 71 iku iu - language Japanese 72 jpn ja - language Javanese 73 jav jv - language Kalaallisut, Greenlandic 74 kal kl - language Kannada 75 kan kn - language Kanuri 76 kau kr - language Kashmiri 77 kas ks - language Kazakh 78 kaz kk - language Khmer 79 khm km - language Kikuyu, Gikuyu 80 kik ki - language Kinyarwanda 81 kin rw - language Kyrgyz 82 - language Komi 83 kom kv - language Kongo 84 kon kg - language Korean 85 kor ko - language Kurdish 86 kur ku - language Kwanyama, Kuanyama 87 kua kj - language Latin 88 lat la - language Luxembourgish, Letzeburgesch 89 ltz lb - language Ganda 90 lug lg - language Limburgish, Limburgan, Limburger 91 lim li - language Lingala 92 lin ln - language Lao 93 lao lo - language Lithuanian 94 lit lt - language Luba-Katanga 95 lub lu - language Latvian 96 lav lv - language Manx 97 glv gv - language Macedonian 98 mkd mac mk - language Malagasy 99 mlg mg - language Malay 100 may msa ms - language Malayalam 101 mal ml - language Maltese 102 mlt mt - language Māori 103 mao mri mi - language Marathi (Marāṭhī) 104 mar mr - language Marshallese 105 mah mh - language Mixtepec Mixtec 106 mix - language Mongolian 107 mon mn - language Nauru 108 nau na - language Navajo, Navaho 109 nav nv - language Northern Ndebele 110 nde nd - language Nepali 111 nep ne - language Ndonga 112 ndo ng - language Norwegian Bokmål 113 nob nb - language Norwegian Nynorsk 114 nno nn - language Norwegian 115 nor no - language Nuosu 116 - language Southern Ndebele 117 nbl nr - language Occitan 118 oci oc - language Ojibwe, Ojibwa 119 oji oj - language Old Church Slavonic,Church Slavonic,Old Bulgarian 120 chu cu - language Oromo 121 orm om - language Oriya 122 ori or - language Ossetian, Ossetic 123 oss os - language Panjabi, Punjabi
124 pan pa - language Pāli 125 pli pi - language Persian (Farsi) 126 per fas fa - language Polish 127 pol pl - language Pashto, Pushto 128 pus ps - language Portuguese 129 por pt - language Quechua 130 que qu - language Romansh 131 roh rm - language Kirundi 132 run rn - language Romanian 133 ron rum ro - language Russian 134 rus ru - language Sanskrit (Saṃskṛta) 135 san sa - language Sardinian 136 srd sc - language Sindhi 137 snd sd - language Northern Sami 138 sme se - language Samoan 139 smo sm - language Sango 140 sag sg - language Serbian 141 srp scc sr - language Scottish Gaelic, Gaelic 142 gla gd - language Shona 143 sna sn - language Sinhala, Sinhalese 144 sin si - language Slovak 145 slk slo sk - language Slovene 146 slv sl - language Somali 147 som so - language Southern Sotho 148 sot st - language Spanish, Castilian 149 spa es - language Sundanese 150 sun su - language Swahili 151 swa sw - language Swati 152 ssw ss - language Swedish 153 swe sv - language Tamil 154 tam ta - language Telugu 155 tel te - language Tajik 156 tgk tg - language Thai 157 tha th - language Tigrinya 158 tir ti - language Tibetan Standard, Tibetan, Central 159 tib bod bo - language Turkmen 160 tuk tk - language Tagalog 161 tgl tl - language Tswana 162 tsn tn - language Tonga (Tonga Islands) 163 ton to - language Turkish 164 tur tr - language Tsonga 165 tso ts - language Tatar 166 tat tt - language Twi 167 twi tw - language Tahitian 168 tah ty - language Uyghur, Uighur 169 uig ug - language Ukrainian 170 ukr uk - language Urdu 171 urd ur - language Uzbek 172 uzb uz - language Venda 173 ven ve - language Vietnamese 174 vie vi - language Volapük 175 vol vo - language Walloon 176 wln wa - language Welsh 177 cym wel cy - language Wolof 178 wol wo - language Western Frisian 179 fry fy - language Xhosa 180 xho xh - language Yiddish 181 yid yi - language Yoruba 182 yor yo - language Zhuang, Chuang 183 zha za - language Zulu 184 zul zu - language Not applicable 185 +#controlledVocabulary DatasetField Value identifier displayOrder + subject Agricultural Sciences D01 0 + subject Arts and Humanities D0 1 + subject Astronomy and Astrophysics D1 2 + subject Business and Management D2 3 + subject Chemistry D3 4 + subject Computer and Information Science D7 5 + subject Earth and Environmental Sciences D4 6 + subject Engineering D5 7 + subject Law D8 8 + subject Mathematical Sciences D9 9 + subject Medicine, Health and Life Sciences D6 10 + subject Physics D10 11 + subject Social Sciences D11 12 + subject Other D12 13 + publicationIDType ark 0 + publicationIDType arXiv 1 + publicationIDType bibcode 2 + publicationIDType doi 3 + publicationIDType ean13 4 + publicationIDType eissn 5 + publicationIDType handle 6 + publicationIDType isbn 7 + publicationIDType issn 8 + publicationIDType istc 9 + publicationIDType lissn 10 + publicationIDType lsid 11 + publicationIDType pmid 12 + publicationIDType purl 13 + publicationIDType upc 14 + publicationIDType url 15 + publicationIDType urn 16 + publicationIDType DASH-NRS 17 + contributorType Data Collector 0 + contributorType Data Curator 1 + contributorType Data Manager 2 + contributorType Editor 3 + contributorType Funder 4 + contributorType Hosting Institution 5 + contributorType Project Leader 6 + contributorType Project Manager 7 + contributorType Project Member 8 + contributorType Related Person 9 + contributorType Researcher 10 + contributorType Research Group 11 + contributorType Rights Holder 12 + contributorType Sponsor 13 + contributorType Supervisor 14 + contributorType
Work Package Leader 15 + contributorType Other 16 + authorIdentifierScheme ORCID 0 + authorIdentifierScheme ISNI 1 + authorIdentifierScheme LCNA 2 + authorIdentifierScheme VIAF 3 + authorIdentifierScheme GND 4 + authorIdentifierScheme DAI 5 + authorIdentifierScheme ResearcherID 6 + authorIdentifierScheme ScopusID 7 + language Abkhaz 0 + language Afar 1 aar aa + language Afrikaans 2 afr af + language Akan 3 aka ak + language Albanian 4 sqi alb sq + language Amharic 5 amh am + language Arabic 6 ara ar + language Aragonese 7 arg an + language Armenian 8 hye arm hy + language Assamese 9 asm as + language Avaric 10 ava av + language Avestan 11 ave ae + language Aymara 12 aym ay + language Azerbaijani 13 aze az + language Bambara 14 bam bm + language Bashkir 15 bak ba + language Basque 16 eus baq eu + language Belarusian 17 bel be + language Bengali, Bangla 18 ben bn + language Bihari 19 bih bh + language Bislama 20 bis bi + language Bosnian 21 bos bs + language Breton 22 bre br + language Bulgarian 23 bul bg + language Burmese 24 mya bur my + language Catalan,Valencian 25 cat ca + language Chamorro 26 cha ch + language Chechen 27 che ce + language Chichewa, Chewa, Nyanja 28 nya ny + language Chinese 29 zho chi zh + language Chuvash 30 chv cv + language Cornish 31 cor kw + language Corsican 32 cos co + language Cree 33 cre cr + language Croatian 34 hrv src hr + language Czech 35 ces cze cs + language Danish 36 dan da + language Divehi, Dhivehi, Maldivian 37 div dv + language Dutch 38 nld dut nl + language Dzongkha 39 dzo dz + language English 40 eng en + language Esperanto 41 epo eo + language Estonian 42 est et + language Ewe 43 ewe ee + language Faroese 44 fao fo + language Fijian 45 fij fj + language Finnish 46 fin fi + language French 47 fra fre fr + language Fula, Fulah, Pulaar, Pular 48 ful ff + language Galician 49 glg gl + language Georgian 50 kat geo ka + language German 51 deu ger de + language Greek (modern) 52 gre ell el + language Guaraní 53 grn gn + language Gujarati 54 guj gu + language Haitian, Haitian Creole 55 hat ht + language Hausa 56 hau ha + language Hebrew (modern) 57 heb he + language Herero 58 her hz + language Hindi 59 hin hi + language Hiri Motu 60 hmo ho + language Hungarian 61 hun hu + language Interlingua 62 ina ia + language Indonesian 63 ind id + language Interlingue 64 ile ie + language Irish 65 gle ga + language Igbo 66 ibo ig + language Inupiaq 67 ipk ik + language Ido 68 ido io + language Icelandic 69 isl ice is + language Italian 70 ita it + language Inuktitut 71 iku iu + language Japanese 72 jpn ja + language Javanese 73 jav jv + language Kalaallisut, Greenlandic 74 kal kl + language Kannada 75 kan kn + language Kanuri 76 kau kr + language Kashmiri 77 kas ks + language Kazakh 78 kaz kk + language Khmer 79 khm km + language Kikuyu, Gikuyu 80 kik ki + language Kinyarwanda 81 kin rw + language Kyrgyz 82 + language Komi 83 kom kv + language Kongo 84 kon kg + language Korean 85 kor ko + language Kurdish 86 kur ku + language Kwanyama, Kuanyama 87 kua kj + language Latin 88 lat la + language Luxembourgish, Letzeburgesch 89 ltz lb + language Ganda 90 lug lg + language Limburgish, Limburgan, Limburger 91 lim li + language Lingala 92 lin ln + language Lao 93 lao lo + language Lithuanian 94 lit lt + language Luba-Katanga 95 lub lu + language Latvian 96 lav lv + language Manx 97 glv gv + language Macedonian 98 mkd mac mk + language Malagasy 99 mlg mg + language Malay 100 may msa ms + language Malayalam 101 mal ml + language Maltese 102 mlt mt + language Māori 103 mao
mri mi + language Marathi (MarÄá¹­hÄ«) 104 mar mr + language Marshallese 105 mah mh + language Mixtepec Mixtec 106 mix + language Mongolian 107 mon mn + language Nauru 108 nau na + language Navajo, Navaho 109 nav nv + language Northern Ndebele 110 nde nd + language Nepali 111 nep ne + language Ndonga 112 ndo ng + language Norwegian BokmÃ¥l 113 nob nb + language Norwegian Nynorsk 114 nno nn + language Norwegian 115 nor no + language Nuosu 116 + language Southern Ndebele 117 nbl nr + language Occitan 118 oci oc + language Ojibwe, Ojibwa 119 oji oj + language Old Church Slavonic,Church Slavonic,Old Bulgarian 120 chu cu + language Oromo 121 orm om + language Oriya 122 ori or + language Ossetian, Ossetic 123 oss os + language Panjabi, Punjabi 124 pan pa + language PÄli 125 pli pi + language Persian (Farsi) 126 per fas fa + language Polish 127 pol pl + language Pashto, Pushto 128 pus ps + language Portuguese 129 por pt + language Quechua 130 que qu + language Romansh 131 roh rm + language Kirundi 132 run rn + language Romanian 133 ron rum ro + language Russian 134 rus ru + language Sanskrit (Saá¹ská¹›ta) 135 san sa + language Sardinian 136 srd sc + language Sindhi 137 snd sd + language Northern Sami 138 sme se + language Samoan 139 smo sm + language Sango 140 sag sg + language Serbian 141 srp scc sr + language Scottish Gaelic, Gaelic 142 gla gd + language Shona 143 sna sn + language Sinhala, Sinhalese 144 sin si + language Slovak 145 slk slo sk + language Slovene 146 slv sl + language Somali 147 som so + language Southern Sotho 148 sot st + language Spanish, Castilian 149 spa es + language Sundanese 150 sun su + language Swahili 151 swa sw + language Swati 152 ssw ss + language Swedish 153 swe sv + language Tamil 154 tam ta + language Telugu 155 tel te + language Tajik 156 tgk tg + language Thai 157 tha th + language Tigrinya 158 tir ti + language Tibetan Standard, Tibetan, Central 159 tib bod bo + language Turkmen 160 tuk tk + language Tagalog 161 tgl tl + language Tswana 162 tsn tn + language Tonga (Tonga Islands) 163 ton to + language Turkish 164 tur tr + language Tsonga 165 tso ts + language Tatar 166 tat tt + language Twi 167 twi tw + language Tahitian 168 tah ty + language Uyghur, Uighur 169 uig ug + language Ukrainian 170 ukr uk + language Urdu 171 urd ur + language Uzbek 172 uzb uz + language Venda 173 ven ve + language Vietnamese 174 vie vi + language Volapük 175 vol vo + language Walloon 176 wln wa + language Welsh 177 cym wel cy + language Wolof 178 wol wo + language Western Frisian 179 fry fy + language Xhosa 180 xho xh + language Yiddish 181 yid yi + language Yoruba 182 yor yo + language Zhuang, Chuang 183 zha za + language Zulu 184 zul zu + language Not applicable 185 From e5a356a5132cb7296ca1878f0120f0a52faebbdb Mon Sep 17 00:00:00 2001 From: lubitchv Date: Mon, 13 Mar 2023 13:35:42 -0400 Subject: [PATCH 0060/1525] release notes --- doc/release-notes/9428-alternative-title.md | 6 ++++++ 1 file changed, 6 insertions(+) create mode 100644 doc/release-notes/9428-alternative-title.md diff --git a/doc/release-notes/9428-alternative-title.md b/doc/release-notes/9428-alternative-title.md new file mode 100644 index 00000000000..d6eaa680612 --- /dev/null +++ b/doc/release-notes/9428-alternative-title.md @@ -0,0 +1,6 @@ +Alternative Title is made repeatable. +- One will need to update database with updated citation block. 
+`curl http://localhost:8080/api/admin/datasetfield/load -H "Content-type: text/tab-separated-values" -X POST --upload-file scripts/api/data/metadatablocks/citation.tsv`
+- One will also need to update the Solr schema:
+Set multiValued="true" on the "alternativeTitle" field in `/usr/local/solr/solr-8.11.1/server/solr/collection1/conf/schema.xml`
+Then reload the Solr schema: `curl "http://localhost:8983/solr/admin/cores?action=RELOAD&core=collection1"`

From efeb04975ad03a226eddf0d2f9f0042cb44cca7a Mon Sep 17 00:00:00 2001
From: qqmyers
Date: Tue, 14 Mar 2023 14:09:28 -0400
Subject: [PATCH 0061/1525] Change to fine logging

---
 .../dataverse/engine/command/impl/AbstractDatasetCommand.java | 4 ++--
 .../engine/command/impl/UpdateDatasetVersionCommand.java      | 1 -
 2 files changed, 2 insertions(+), 3 deletions(-)

diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/AbstractDatasetCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/AbstractDatasetCommand.java
index 865993b19ad..ce76b7ab03e 100644
--- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/AbstractDatasetCommand.java
+++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/AbstractDatasetCommand.java
@@ -229,9 +229,9 @@ protected void checkSystemMetadataKeyIfNeeded(DatasetVersion newVersion, Dataset
         String smdbString = JvmSettings.MDB_SYSTEM_KEY_FOR.lookupOptional(mdb.getName())
                 .orElse(null);
         if (smdbString != null) {
-            logger.info("Found key: " + smdbString);
+            logger.fine("Found key: " + smdbString);
             String mdKey = getRequest().getSystemMetadataBlockKeyFor(mdb.getName());
-            logger.info("Found supplied key: " + mdKey);
+            logger.fine("Found supplied key: " + mdKey);
             if (mdKey == null || !mdKey.equalsIgnoreCase(smdbString)) {
                 throw new IllegalCommandException("Updating system metadata in block " + mdb.getName() + " requires a valid key", this);
             }
diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateDatasetVersionCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateDatasetVersionCommand.java
index 25706574c9a..3a5364451f0 100644
--- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateDatasetVersionCommand.java
+++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateDatasetVersionCommand.java
@@ -114,7 +114,6 @@ public Dataset execute(CommandContext ctxt) throws CommandException {
          *
          */
         if(persistedVersion==null) {
-            logger.info("No Clone");
             Long id = getDataset().getLatestVersion().getId();
             persistedVersion = ctxt.datasetVersion().find(id!=null ?
id: getDataset().getLatestVersionForCopy().getId()); } From 5d81fc7042e81b152cc89773a3e67afd47c17e5a Mon Sep 17 00:00:00 2001 From: lubitchv Date: Tue, 14 Mar 2023 16:15:48 -0400 Subject: [PATCH 0062/1525] import --- .../iq/dataverse/api/imports/ImportDDIServiceBean.java | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/api/imports/ImportDDIServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/api/imports/ImportDDIServiceBean.java index d9433832309..f7f0e30ea6e 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/imports/ImportDDIServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/imports/ImportDDIServiceBean.java @@ -1396,6 +1396,7 @@ private void processProdStmt(XMLStreamReader xmlr, MetadataBlockDTO citation) th private void processTitlStmt(XMLStreamReader xmlr, DatasetDTO datasetDTO) throws XMLStreamException, ImportException { MetadataBlockDTO citation = datasetDTO.getDatasetVersion().getMetadataBlocks().get("citation"); List> otherIds = new ArrayList<>(); + List altTitles = new ArrayList<>(); for (int event = xmlr.next(); event != XMLStreamConstants.END_DOCUMENT; event = xmlr.next()) { if (event == XMLStreamConstants.START_ELEMENT) { @@ -1406,8 +1407,7 @@ private void processTitlStmt(XMLStreamReader xmlr, DatasetDTO datasetDTO) throws FieldDTO field = FieldDTO.createPrimitiveFieldDTO("subtitle", parseText(xmlr)); citation.getFields().add(field); } else if (xmlr.getLocalName().equals("altTitl")) { - FieldDTO field = FieldDTO.createPrimitiveFieldDTO("alternativeTitle", parseText(xmlr)); - citation.getFields().add(field); + altTitles.add(parseText(xmlr)); } else if (xmlr.getLocalName().equals("IDNo")) { if ( AGENCY_HANDLE.equals( xmlr.getAttributeValue(null, "agency") ) || AGENCY_DOI.equals( xmlr.getAttributeValue(null, "agency") ) ) { importGenericService.reassignIdentifierAsGlobalId(parseText(xmlr), datasetDTO); @@ -1435,6 +1435,9 @@ private void processTitlStmt(XMLStreamReader xmlr, DatasetDTO datasetDTO) throws if (otherIds.size()>0) { citation.addField(FieldDTO.createMultipleCompoundFieldDTO("otherId", otherIds)); } + if (altTitles.size()>0) { + citation.addField(FieldDTO.createMultiplePrimitiveFieldDTO("alternativeTitle", altTitles)); + } return; } } From 991c5f9faf5a378c0c9e21848788d24cb918fbbc Mon Sep 17 00:00:00 2001 From: lubitchv Date: Wed, 15 Mar 2023 17:20:01 -0400 Subject: [PATCH 0063/1525] prodPlac --- scripts/api/data/metadatablocks/citation.tsv | 507 +++++++++--------- .../api/imports/ImportDDIServiceBean.java | 6 +- .../dataverse/export/ddi/DdiExportUtil.java | 9 +- .../edu/harvard/iq/dataverse/api/AdminIT.java | 2 +- .../dataset-create-new-all-ddi-fields.json | 4 +- 5 files changed, 268 insertions(+), 260 deletions(-) diff --git a/scripts/api/data/metadatablocks/citation.tsv b/scripts/api/data/metadatablocks/citation.tsv index 20d858ddb4b..3fbc38438b8 100644 --- a/scripts/api/data/metadatablocks/citation.tsv +++ b/scripts/api/data/metadatablocks/citation.tsv @@ -4,7 +4,7 @@ title Title The main title of the Dataset text 0 TRUE FALSE FALSE FALSE TRUE TRUE citation http://purl.org/dc/terms/title subtitle Subtitle A secondary title that amplifies or states certain limitations on the main title text 1 FALSE FALSE FALSE FALSE FALSE FALSE citation alternativeTitle Alternative Title Either 1) a title commonly used to refer to the Dataset or 2) an abbreviation of the main title text 2 FALSE FALSE TRUE FALSE FALSE FALSE citation http://purl.org/dc/terms/alternative - alternativeURL Alternative 
URL Another URL where one can view or access the data in the Dataset, e.g. a project or personal webpage https:// url 3 "#VALUE" FALSE FALSE FALSE FALSE FALSE FALSE citation https://schema.org/distribution + alternativeURL Alternative URL Another URL where one can view or access the data in the Dataset, e.g. a project or personal webpage https:// url 3 #VALUE FALSE FALSE FALSE FALSE FALSE FALSE citation https://schema.org/distribution otherId Other Identifier Another unique identifier for the Dataset (e.g. producer's or another repository's identifier) none 4 : FALSE FALSE TRUE FALSE FALSE FALSE citation otherIdAgency Agency The name of the agency that generated the other identifier text 5 #VALUE FALSE FALSE FALSE FALSE FALSE FALSE otherId citation otherIdValue Identifier Another identifier uniquely identifies the Dataset text 6 #VALUE FALSE FALSE FALSE FALSE FALSE FALSE otherId citation @@ -24,26 +24,26 @@ keyword Keyword A key term that describes an important aspect of the Dataset and information about any controlled vocabulary used none 20 FALSE FALSE TRUE FALSE TRUE FALSE citation keywordValue Term A key term that describes important aspects of the Dataset text 21 #VALUE TRUE FALSE FALSE TRUE TRUE FALSE keyword citation keywordVocabulary Controlled Vocabulary Name The controlled vocabulary used for the keyword term (e.g. LCSH, MeSH) text 22 (#VALUE) FALSE FALSE FALSE FALSE TRUE FALSE keyword citation - keywordVocabularyURI Controlled Vocabulary URL The URL where one can access information about the term's controlled vocabulary https:// url 23 "#VALUE" FALSE FALSE FALSE FALSE TRUE FALSE keyword citation + keywordVocabularyURI Controlled Vocabulary URL The URL where one can access information about the term's controlled vocabulary https:// url 23 #VALUE FALSE FALSE FALSE FALSE TRUE FALSE keyword citation topicClassification Topic Classification Indicates a broad, important topic or subject that the Dataset covers and information about any controlled vocabulary used none 24 FALSE FALSE TRUE FALSE FALSE FALSE citation topicClassValue Term A topic or subject term text 25 #VALUE TRUE FALSE FALSE TRUE FALSE FALSE topicClassification citation topicClassVocab Controlled Vocabulary Name The controlled vocabulary used for the keyword term (e.g. LCSH, MeSH) text 26 (#VALUE) FALSE FALSE FALSE FALSE FALSE FALSE topicClassification citation - topicClassVocabURI Controlled Vocabulary URL The URL where one can access information about the term's controlled vocabulary https:// url 27 "#VALUE" FALSE FALSE FALSE FALSE FALSE FALSE topicClassification citation + topicClassVocabURI Controlled Vocabulary URL The URL where one can access information about the term's controlled vocabulary https:// url 27 #VALUE FALSE FALSE FALSE FALSE FALSE FALSE topicClassification citation publication Related Publication The article or report that uses the data in the Dataset. 
The full list of related publications will be displayed on the metadata tab none 28 FALSE FALSE TRUE FALSE TRUE FALSE citation http://purl.org/dc/terms/isReferencedBy publicationCitation Citation The full bibliographic citation for the related publication textbox 29 #VALUE TRUE FALSE FALSE FALSE TRUE FALSE publication citation http://purl.org/dc/terms/bibliographicCitation publicationIDType Identifier Type The type of identifier that uniquely identifies a related publication text 30 #VALUE: TRUE TRUE FALSE FALSE TRUE FALSE publication citation http://purl.org/spar/datacite/ResourceIdentifierScheme publicationIDNumber Identifier The identifier for a related publication text 31 #VALUE TRUE FALSE FALSE FALSE TRUE FALSE publication citation http://purl.org/spar/datacite/ResourceIdentifier - publicationURL URL The URL form of the identifier entered in the Identifier field, e.g. the DOI URL if a DOI was entered in the Identifier field. Used to display what was entered in the ID Type and ID Number fields as a link. If what was entered in the Identifier field has no URL form, the URL of the publication webpage is used, e.g. a journal article webpage https:// url 32 "#VALUE" FALSE FALSE FALSE FALSE FALSE FALSE publication citation https://schema.org/distribution + publicationURL URL The URL form of the identifier entered in the Identifier field, e.g. the DOI URL if a DOI was entered in the Identifier field. Used to display what was entered in the ID Type and ID Number fields as a link. If what was entered in the Identifier field has no URL form, the URL of the publication webpage is used, e.g. a journal article webpage https:// url 32 #VALUE FALSE FALSE FALSE FALSE FALSE FALSE publication citation https://schema.org/distribution notesText Notes Additional information about the Dataset textbox 33 FALSE FALSE FALSE FALSE TRUE FALSE citation language Language A language that the Dataset's files is written in text 34 TRUE TRUE TRUE TRUE FALSE FALSE citation http://purl.org/dc/terms/language producer Producer The entity, such a person or organization, managing the finances or other administrative processes involved in the creation of the Dataset none 35 FALSE FALSE TRUE FALSE FALSE FALSE citation producerName Name The name of the entity, e.g. the person's name or the name of an organization 1) FamilyName, GivenName or 2) Organization text 36 #VALUE TRUE FALSE FALSE TRUE FALSE TRUE producer citation producerAffiliation Affiliation The name of the entity affiliated with the producer, e.g. an organization's name Organization XYZ text 37 (#VALUE) FALSE FALSE FALSE FALSE FALSE FALSE producer citation producerAbbreviation Abbreviated Name The producer's abbreviated name (e.g. IQSS, ICPSR) text 38 (#VALUE) FALSE FALSE FALSE FALSE FALSE FALSE producer citation - producerURL URL The URL of the producer's website https:// url 39 "#VALUE" FALSE FALSE FALSE FALSE FALSE FALSE producer citation - producerLogoURL Logo URL The URL of the producer's logo https:// url 40 "
    " FALSE FALSE FALSE FALSE FALSE FALSE producer citation + producerURL URL The URL of the producer's website https:// url 39 #VALUE FALSE FALSE FALSE FALSE FALSE FALSE producer citation + producerLogoURL Logo URL The URL of the producer's logo https:// url 40
    FALSE FALSE FALSE FALSE FALSE FALSE producer citation productionDate Production Date The date when the data were produced (not distributed, published, or archived) YYYY-MM-DD date 41 TRUE FALSE FALSE TRUE FALSE FALSE citation - productionPlace Production Location The location where the data and any related materials were produced or collected text 42 FALSE FALSE FALSE FALSE FALSE FALSE citation + productionPlace Production Location The location where the data and any related materials were produced or collected text 42 TRUE FALSE TRUE TRUE FALSE FALSE citation contributor Contributor The entity, such as a person or organization, responsible for collecting, managing, or otherwise contributing to the development of the Dataset none 43 : FALSE FALSE TRUE FALSE FALSE FALSE citation http://purl.org/dc/terms/contributor contributorType Type Indicates the type of contribution made to the dataset text 44 #VALUE TRUE TRUE FALSE TRUE FALSE FALSE contributor citation contributorName Name The name of the contributor, e.g. the person's name or the name of an organization 1) FamilyName, GivenName or 2) Organization text 45 #VALUE TRUE FALSE FALSE TRUE FALSE FALSE contributor citation @@ -54,8 +54,8 @@ distributorName Name The name of the entity, e.g. the person's name or the name of an organization 1) FamilyName, GivenName or 2) Organization text 50 #VALUE TRUE FALSE FALSE TRUE FALSE FALSE distributor citation distributorAffiliation Affiliation The name of the entity affiliated with the distributor, e.g. an organization's name Organization XYZ text 51 (#VALUE) FALSE FALSE FALSE FALSE FALSE FALSE distributor citation distributorAbbreviation Abbreviated Name The distributor's abbreviated name (e.g. IQSS, ICPSR) text 52 (#VALUE) FALSE FALSE FALSE FALSE FALSE FALSE distributor citation - distributorURL URL The URL of the distributor's webpage https:// url 53 "#VALUE" FALSE FALSE FALSE FALSE FALSE FALSE distributor citation - distributorLogoURL Logo URL The URL of the distributor's logo image, used to show the image on the Dataset's page https:// url 54 "
    " FALSE FALSE FALSE FALSE FALSE FALSE distributor citation + distributorURL URL The URL of the distributor's webpage https:// url 53 #VALUE FALSE FALSE FALSE FALSE FALSE FALSE distributor citation + distributorLogoURL Logo URL The URL of the distributor's logo image, used to show the image on the Dataset's page https:// url 54
    FALSE FALSE FALSE FALSE FALSE FALSE distributor citation distributionDate Distribution Date The date when the Dataset was made available for distribution/presentation YYYY-MM-DD date 55 TRUE FALSE FALSE TRUE FALSE FALSE citation depositor Depositor The entity, such as a person or organization, that deposited the Dataset in the repository 1) FamilyName, GivenName or 2) Organization text 56 FALSE FALSE FALSE FALSE FALSE FALSE citation dateOfDeposit Deposit Date The date when the Dataset was deposited into the repository YYYY-MM-DD date 57 FALSE FALSE FALSE TRUE FALSE FALSE citation http://purl.org/dc/terms/dateSubmitted @@ -79,247 +79,248 @@ originOfSources Origin of Historical Sources For historical sources, the origin and any rules followed in establishing them as sources textbox 75 FALSE FALSE FALSE FALSE FALSE FALSE citation characteristicOfSources Characteristic of Sources Characteristics not already noted elsewhere textbox 76 FALSE FALSE FALSE FALSE FALSE FALSE citation accessToSources Documentation and Access to Sources 1) Methods or procedures for accessing data sources and 2) any special permissions needed for access textbox 77 FALSE FALSE FALSE FALSE FALSE FALSE citation -#controlledVocabulary DatasetField Value identifier displayOrder - subject Agricultural Sciences D01 0 - subject Arts and Humanities D0 1 - subject Astronomy and Astrophysics D1 2 - subject Business and Management D2 3 - subject Chemistry D3 4 - subject Computer and Information Science D7 5 - subject Earth and Environmental Sciences D4 6 - subject Engineering D5 7 - subject Law D8 8 - subject Mathematical Sciences D9 9 - subject Medicine, Health and Life Sciences D6 10 - subject Physics D10 11 - subject Social Sciences D11 12 - subject Other D12 13 - publicationIDType ark 0 - publicationIDType arXiv 1 - publicationIDType bibcode 2 - publicationIDType doi 3 - publicationIDType ean13 4 - publicationIDType eissn 5 - publicationIDType handle 6 - publicationIDType isbn 7 - publicationIDType issn 8 - publicationIDType istc 9 - publicationIDType lissn 10 - publicationIDType lsid 11 - publicationIDType pmid 12 - publicationIDType purl 13 - publicationIDType upc 14 - publicationIDType url 15 - publicationIDType urn 16 - publicationIDType DASH-NRS 17 - contributorType Data Collector 0 - contributorType Data Curator 1 - contributorType Data Manager 2 - contributorType Editor 3 - contributorType Funder 4 - contributorType Hosting Institution 5 - contributorType Project Leader 6 - contributorType Project Manager 7 - contributorType Project Member 8 - contributorType Related Person 9 - contributorType Researcher 10 - contributorType Research Group 11 - contributorType Rights Holder 12 - contributorType Sponsor 13 - contributorType Supervisor 14 - contributorType Work Package Leader 15 - contributorType Other 16 - authorIdentifierScheme ORCID 0 - authorIdentifierScheme ISNI 1 - authorIdentifierScheme LCNA 2 - authorIdentifierScheme VIAF 3 - authorIdentifierScheme GND 4 - authorIdentifierScheme DAI 5 - authorIdentifierScheme ResearcherID 6 - authorIdentifierScheme ScopusID 7 - language Abkhaz 0 - language Afar 1 aar aa - language Afrikaans 2 afr af - language Akan 3 aka ak - language Albanian 4 sqi alb sq - language Amharic 5 amh am - language Arabic 6 ara ar - language Aragonese 7 arg an - language Armenian 8 hye arm hy - language Assamese 9 asm as - language Avaric 10 ava av - language Avestan 11 ave ae - language Aymara 12 aym ay - language Azerbaijani 13 aze az - language Bambara 14 bam bm - language Bashkir 15 bak ba - 
language Basque 16 eus baq eu - language Belarusian 17 bel be - language Bengali, Bangla 18 ben bn - language Bihari 19 bih bh - language Bislama 20 bis bi - language Bosnian 21 bos bs - language Breton 22 bre br - language Bulgarian 23 bul bg - language Burmese 24 mya bur my - language Catalan,Valencian 25 cat ca - language Chamorro 26 cha ch - language Chechen 27 che ce - language Chichewa, Chewa, Nyanja 28 nya ny - language Chinese 29 zho chi zh - language Chuvash 30 chv cv - language Cornish 31 cor kw - language Corsican 32 cos co - language Cree 33 cre cr - language Croatian 34 hrv src hr - language Czech 35 ces cze cs - language Danish 36 dan da - language Divehi, Dhivehi, Maldivian 37 div dv - language Dutch 38 nld dut nl - language Dzongkha 39 dzo dz - language English 40 eng en - language Esperanto 41 epo eo - language Estonian 42 est et - language Ewe 43 ewe ee - language Faroese 44 fao fo - language Fijian 45 fij fj - language Finnish 46 fin fi - language French 47 fra fre fr - language Fula, Fulah, Pulaar, Pular 48 ful ff - language Galician 49 glg gl - language Georgian 50 kat geo ka - language German 51 deu ger de - language Greek (modern) 52 gre ell el - language Guaraní 53 grn gn - language Gujarati 54 guj gu - language Haitian, Haitian Creole 55 hat ht - language Hausa 56 hau ha - language Hebrew (modern) 57 heb he - language Herero 58 her hz - language Hindi 59 hin hi - language Hiri Motu 60 hmo ho - language Hungarian 61 hun hu - language Interlingua 62 ina ia - language Indonesian 63 ind id - language Interlingue 64 ile ie - language Irish 65 gle ga - language Igbo 66 ibo ig - language Inupiaq 67 ipk ik - language Ido 68 ido io - language Icelandic 69 isl ice is - language Italian 70 ita it - language Inuktitut 71 iku iu - language Japanese 72 jpn ja - language Javanese 73 jav jv - language Kalaallisut, Greenlandic 74 kal kl - language Kannada 75 kan kn - language Kanuri 76 kau kr - language Kashmiri 77 kas ks - language Kazakh 78 kaz kk - language Khmer 79 khm km - language Kikuyu, Gikuyu 80 kik ki - language Kinyarwanda 81 kin rw - language Kyrgyz 82 - language Komi 83 kom kv - language Kongo 84 kon kg - language Korean 85 kor ko - language Kurdish 86 kur ku - language Kwanyama, Kuanyama 87 kua kj - language Latin 88 lat la - language Luxembourgish, Letzeburgesch 89 ltz lb - language Ganda 90 lug lg - language Limburgish, Limburgan, Limburger 91 lim li - language Lingala 92 lin ln - language Lao 93 lao lo - language Lithuanian 94 lit lt - language Luba-Katanga 95 lub lu - language Latvian 96 lav lv - language Manx 97 glv gv - language Macedonian 98 mkd mac mk - language Malagasy 99 mlg mg - language Malay 100 may msa ms - language Malayalam 101 mal ml - language Maltese 102 mlt mt - language MÄori 103 mao mri mi - language Marathi (MarÄá¹­hÄ«) 104 mar mr - language Marshallese 105 mah mh - language Mixtepec Mixtec 106 mix - language Mongolian 107 mon mn - language Nauru 108 nau na - language Navajo, Navaho 109 nav nv - language Northern Ndebele 110 nde nd - language Nepali 111 nep ne - language Ndonga 112 ndo ng - language Norwegian BokmÃ¥l 113 nob nb - language Norwegian Nynorsk 114 nno nn - language Norwegian 115 nor no - language Nuosu 116 - language Southern Ndebele 117 nbl nr - language Occitan 118 oci oc - language Ojibwe, Ojibwa 119 oji oj - language Old Church Slavonic,Church Slavonic,Old Bulgarian 120 chu cu - language Oromo 121 orm om - language Oriya 122 ori or - language Ossetian, Ossetic 123 oss os - language Panjabi, Punjabi 124 pan pa - language PÄli 
125 pli pi - language Persian (Farsi) 126 per fas fa - language Polish 127 pol pl - language Pashto, Pushto 128 pus ps - language Portuguese 129 por pt - language Quechua 130 que qu - language Romansh 131 roh rm - language Kirundi 132 run rn - language Romanian 133 ron rum ro - language Russian 134 rus ru - language Sanskrit (Saá¹ská¹›ta) 135 san sa - language Sardinian 136 srd sc - language Sindhi 137 snd sd - language Northern Sami 138 sme se - language Samoan 139 smo sm - language Sango 140 sag sg - language Serbian 141 srp scc sr - language Scottish Gaelic, Gaelic 142 gla gd - language Shona 143 sna sn - language Sinhala, Sinhalese 144 sin si - language Slovak 145 slk slo sk - language Slovene 146 slv sl - language Somali 147 som so - language Southern Sotho 148 sot st - language Spanish, Castilian 149 spa es - language Sundanese 150 sun su - language Swahili 151 swa sw - language Swati 152 ssw ss - language Swedish 153 swe sv - language Tamil 154 tam ta - language Telugu 155 tel te - language Tajik 156 tgk tg - language Thai 157 tha th - language Tigrinya 158 tir ti - language Tibetan Standard, Tibetan, Central 159 tib bod bo - language Turkmen 160 tuk tk - language Tagalog 161 tgl tl - language Tswana 162 tsn tn - language Tonga (Tonga Islands) 163 ton to - language Turkish 164 tur tr - language Tsonga 165 tso ts - language Tatar 166 tat tt - language Twi 167 twi tw - language Tahitian 168 tah ty - language Uyghur, Uighur 169 uig ug - language Ukrainian 170 ukr uk - language Urdu 171 urd ur - language Uzbek 172 uzb uz - language Venda 173 ven ve - language Vietnamese 174 vie vi - language Volapük 175 vol vo - language Walloon 176 wln wa - language Welsh 177 cym wel cy - language Wolof 178 wol wo - language Western Frisian 179 fry fy - language Xhosa 180 xho xh - language Yiddish 181 yid yi - language Yoruba 182 yor yo - language Zhuang, Chuang 183 zha za - language Zulu 184 zul zu - language Not applicable 185 +#controlledVocabulary DatasetField Value identifier displayOrder + subject Agricultural Sciences D01 0 + subject Arts and Humanities D0 1 + subject Astronomy and Astrophysics D1 2 + subject Business and Management D2 3 + subject Chemistry D3 4 + subject Computer and Information Science D7 5 + subject Earth and Environmental Sciences D4 6 + subject Engineering D5 7 + subject Law D8 8 + subject Mathematical Sciences D9 9 + subject Medicine, Health and Life Sciences D6 10 + subject Physics D10 11 + subject Social Sciences D11 12 + subject Other D12 13 + publicationIDType ark 0 + publicationIDType arXiv 1 + publicationIDType bibcode 2 + publicationIDType cstr 3 + publicationIDType doi 4 + publicationIDType ean13 5 + publicationIDType eissn 6 + publicationIDType handle 7 + publicationIDType isbn 8 + publicationIDType issn 9 + publicationIDType istc 10 + publicationIDType lissn 11 + publicationIDType lsid 12 + publicationIDType pmid 13 + publicationIDType purl 14 + publicationIDType upc 15 + publicationIDType url 16 + publicationIDType urn 17 + publicationIDType DASH-NRS 18 + contributorType Data Collector 0 + contributorType Data Curator 1 + contributorType Data Manager 2 + contributorType Editor 3 + contributorType Funder 4 + contributorType Hosting Institution 5 + contributorType Project Leader 6 + contributorType Project Manager 7 + contributorType Project Member 8 + contributorType Related Person 9 + contributorType Researcher 10 + contributorType Research Group 11 + contributorType Rights Holder 12 + contributorType Sponsor 13 + contributorType Supervisor 14 + contributorType 
Work Package Leader 15 + contributorType Other 16 + authorIdentifierScheme ORCID 0 + authorIdentifierScheme ISNI 1 + authorIdentifierScheme LCNA 2 + authorIdentifierScheme VIAF 3 + authorIdentifierScheme GND 4 + authorIdentifierScheme DAI 5 + authorIdentifierScheme ResearcherID 6 + authorIdentifierScheme ScopusID 7 + language Abkhaz 0 + language Afar 1 aar aa + language Afrikaans 2 afr af + language Akan 3 aka ak + language Albanian 4 sqi alb sq + language Amharic 5 amh am + language Arabic 6 ara ar + language Aragonese 7 arg an + language Armenian 8 hye arm hy + language Assamese 9 asm as + language Avaric 10 ava av + language Avestan 11 ave ae + language Aymara 12 aym ay + language Azerbaijani 13 aze az + language Bambara 14 bam bm + language Bashkir 15 bak ba + language Basque 16 eus baq eu + language Belarusian 17 bel be + language Bengali, Bangla 18 ben bn + language Bihari 19 bih bh + language Bislama 20 bis bi + language Bosnian 21 bos bs + language Breton 22 bre br + language Bulgarian 23 bul bg + language Burmese 24 mya bur my + language Catalan,Valencian 25 cat ca + language Chamorro 26 cha ch + language Chechen 27 che ce + language Chichewa, Chewa, Nyanja 28 nya ny + language Chinese 29 zho chi zh + language Chuvash 30 chv cv + language Cornish 31 cor kw + language Corsican 32 cos co + language Cree 33 cre cr + language Croatian 34 hrv src hr + language Czech 35 ces cze cs + language Danish 36 dan da + language Divehi, Dhivehi, Maldivian 37 div dv + language Dutch 38 nld dut nl + language Dzongkha 39 dzo dz + language English 40 eng en + language Esperanto 41 epo eo + language Estonian 42 est et + language Ewe 43 ewe ee + language Faroese 44 fao fo + language Fijian 45 fij fj + language Finnish 46 fin fi + language French 47 fra fre fr + language Fula, Fulah, Pulaar, Pular 48 ful ff + language Galician 49 glg gl + language Georgian 50 kat geo ka + language German 51 deu ger de + language Greek (modern) 52 gre ell el + language Guaraní 53 grn gn + language Gujarati 54 guj gu + language Haitian, Haitian Creole 55 hat ht + language Hausa 56 hau ha + language Hebrew (modern) 57 heb he + language Herero 58 her hz + language Hindi 59 hin hi + language Hiri Motu 60 hmo ho + language Hungarian 61 hun hu + language Interlingua 62 ina ia + language Indonesian 63 ind id + language Interlingue 64 ile ie + language Irish 65 gle ga + language Igbo 66 ibo ig + language Inupiaq 67 ipk ik + language Ido 68 ido io + language Icelandic 69 isl ice is + language Italian 70 ita it + language Inuktitut 71 iku iu + language Japanese 72 jpn ja + language Javanese 73 jav jv + language Kalaallisut, Greenlandic 74 kal kl + language Kannada 75 kan kn + language Kanuri 76 kau kr + language Kashmiri 77 kas ks + language Kazakh 78 kaz kk + language Khmer 79 khm km + language Kikuyu, Gikuyu 80 kik ki + language Kinyarwanda 81 kin rw + language Kyrgyz 82 + language Komi 83 kom kv + language Kongo 84 kon kg + language Korean 85 kor ko + language Kurdish 86 kur ku + language Kwanyama, Kuanyama 87 kua kj + language Latin 88 lat la + language Luxembourgish, Letzeburgesch 89 ltz lb + language Ganda 90 lug lg + language Limburgish, Limburgan, Limburger 91 lim li + language Lingala 92 lin ln + language Lao 93 lao lo + language Lithuanian 94 lit lt + language Luba-Katanga 95 lub lu + language Latvian 96 lav lv + language Manx 97 glv gv + language Macedonian 98 mkd mac mk + language Malagasy 99 mlg mg + language Malay 100 may msa ms + language Malayalam 101 mal ml + language Maltese 102 mlt mt + language MÄori 103 mao 
mri mi + language Marathi (MarÄá¹­hÄ«) 104 mar mr + language Marshallese 105 mah mh + language Mixtepec Mixtec 106 mix + language Mongolian 107 mon mn + language Nauru 108 nau na + language Navajo, Navaho 109 nav nv + language Northern Ndebele 110 nde nd + language Nepali 111 nep ne + language Ndonga 112 ndo ng + language Norwegian BokmÃ¥l 113 nob nb + language Norwegian Nynorsk 114 nno nn + language Norwegian 115 nor no + language Nuosu 116 + language Southern Ndebele 117 nbl nr + language Occitan 118 oci oc + language Ojibwe, Ojibwa 119 oji oj + language Old Church Slavonic,Church Slavonic,Old Bulgarian 120 chu cu + language Oromo 121 orm om + language Oriya 122 ori or + language Ossetian, Ossetic 123 oss os + language Panjabi, Punjabi 124 pan pa + language PÄli 125 pli pi + language Persian (Farsi) 126 per fas fa + language Polish 127 pol pl + language Pashto, Pushto 128 pus ps + language Portuguese 129 por pt + language Quechua 130 que qu + language Romansh 131 roh rm + language Kirundi 132 run rn + language Romanian 133 ron rum ro + language Russian 134 rus ru + language Sanskrit (Saá¹ská¹›ta) 135 san sa + language Sardinian 136 srd sc + language Sindhi 137 snd sd + language Northern Sami 138 sme se + language Samoan 139 smo sm + language Sango 140 sag sg + language Serbian 141 srp scc sr + language Scottish Gaelic, Gaelic 142 gla gd + language Shona 143 sna sn + language Sinhala, Sinhalese 144 sin si + language Slovak 145 slk slo sk + language Slovene 146 slv sl + language Somali 147 som so + language Southern Sotho 148 sot st + language Spanish, Castilian 149 spa es + language Sundanese 150 sun su + language Swahili 151 swa sw + language Swati 152 ssw ss + language Swedish 153 swe sv + language Tamil 154 tam ta + language Telugu 155 tel te + language Tajik 156 tgk tg + language Thai 157 tha th + language Tigrinya 158 tir ti + language Tibetan Standard, Tibetan, Central 159 tib bod bo + language Turkmen 160 tuk tk + language Tagalog 161 tgl tl + language Tswana 162 tsn tn + language Tonga (Tonga Islands) 163 ton to + language Turkish 164 tur tr + language Tsonga 165 tso ts + language Tatar 166 tat tt + language Twi 167 twi tw + language Tahitian 168 tah ty + language Uyghur, Uighur 169 uig ug + language Ukrainian 170 ukr uk + language Urdu 171 urd ur + language Uzbek 172 uzb uz + language Venda 173 ven ve + language Vietnamese 174 vie vi + language Volapük 175 vol vo + language Walloon 176 wln wa + language Welsh 177 cym wel cy + language Wolof 178 wol wo + language Western Frisian 179 fry fy + language Xhosa 180 xho xh + language Yiddish 181 yid yi + language Yoruba 182 yor yo + language Zhuang, Chuang 183 zha za + language Zulu 184 zul zu + language Not applicable 185 diff --git a/src/main/java/edu/harvard/iq/dataverse/api/imports/ImportDDIServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/api/imports/ImportDDIServiceBean.java index f7f0e30ea6e..458803e0c92 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/imports/ImportDDIServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/imports/ImportDDIServiceBean.java @@ -1337,6 +1337,7 @@ private void processProdStmt(XMLStreamReader xmlr, MetadataBlockDTO citation) th List> producers = new ArrayList<>(); List> grants = new ArrayList<>(); List> software = new ArrayList<>(); + List prodPlac = new ArrayList<>(); for (int event = xmlr.next(); event != XMLStreamConstants.END_DOCUMENT; event = xmlr.next()) { if (event == XMLStreamConstants.START_ELEMENT) { @@ -1352,9 +1353,7 @@ private void 
processProdStmt(XMLStreamReader xmlr, MetadataBlockDTO citation) th } else if (xmlr.getLocalName().equals("prodDate")) { citation.getFields().add(FieldDTO.createPrimitiveFieldDTO("productionDate", parseDate(xmlr, "prodDate"))); } else if (xmlr.getLocalName().equals("prodPlac")) { - List prodPlac = new ArrayList<>(); prodPlac.add(parseText(xmlr, "prodPlac")); - citation.getFields().add(FieldDTO.createMultiplePrimitiveFieldDTO(DatasetFieldConstant.productionPlace, prodPlac)); } else if (xmlr.getLocalName().equals("software")) { HashSet set = new HashSet<>(); addToSet(set,"softwareVersion", xmlr.getAttributeValue(null, "version")); @@ -1387,6 +1386,9 @@ private void processProdStmt(XMLStreamReader xmlr, MetadataBlockDTO citation) th if (producers.size()>0) { citation.getFields().add(FieldDTO.createMultipleCompoundFieldDTO("producer", producers)); } + if (prodPlac.size() > 0) { + citation.getFields().add(FieldDTO.createMultiplePrimitiveFieldDTO(DatasetFieldConstant.productionPlace, prodPlac)); + } return; } } diff --git a/src/main/java/edu/harvard/iq/dataverse/export/ddi/DdiExportUtil.java b/src/main/java/edu/harvard/iq/dataverse/export/ddi/DdiExportUtil.java index eb53473d4d9..4ea90ea6199 100644 --- a/src/main/java/edu/harvard/iq/dataverse/export/ddi/DdiExportUtil.java +++ b/src/main/java/edu/harvard/iq/dataverse/export/ddi/DdiExportUtil.java @@ -895,8 +895,13 @@ private static void writeProducersElement(XMLStreamWriter xmlw, DatasetVersionDT } } } - writeFullElement(xmlw, "prodDate", dto2Primitive(version, DatasetFieldConstant.productionDate)); - writeFullElement(xmlw, "prodPlac", dto2Primitive(version, DatasetFieldConstant.productionPlace)); + writeFullElement(xmlw, "prodDate", dto2Primitive(version, DatasetFieldConstant.productionDate)); + + FieldDTO prodPlac = dto2FieldDTO( version, DatasetFieldConstant.productionPlace, "citation" ); + if (prodPlac != null) { + writeMultipleElement(xmlw, "prodPlac", prodPlac, null); + } + writeSoftwareElement(xmlw, version); writeGrantElement(xmlw, version); diff --git a/src/test/java/edu/harvard/iq/dataverse/api/AdminIT.java b/src/test/java/edu/harvard/iq/dataverse/api/AdminIT.java index 2ba06314ddb..14185b97e9e 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/AdminIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/AdminIT.java @@ -758,7 +758,7 @@ public void testLoadMetadataBlock_NoErrorPath() { assertEquals(1, data.size()); List> addedElements = data.get("added"); //Note -test depends on the number of elements in the production citation block, so any changes to the # of elements there can break this test - assertEquals(323, addedElements.size()); + assertEquals(322, addedElements.size()); Map statistics = new HashMap<>(); for (Map unit : addedElements) { diff --git a/src/test/java/edu/harvard/iq/dataverse/export/ddi/dataset-create-new-all-ddi-fields.json b/src/test/java/edu/harvard/iq/dataverse/export/ddi/dataset-create-new-all-ddi-fields.json index 96f058b1b02..822623f721a 100644 --- a/src/test/java/edu/harvard/iq/dataverse/export/ddi/dataset-create-new-all-ddi-fields.json +++ b/src/test/java/edu/harvard/iq/dataverse/export/ddi/dataset-create-new-all-ddi-fields.json @@ -409,9 +409,9 @@ }, { "typeName": "productionPlace", - "multiple": false, + "multiple": true, "typeClass": "primitive", - "value": "ProductionPlace" + "value": ["ProductionPlace"] }, { "typeName": "contributor", From 89dbc7a87407a095670fa66dda87616b8500959d Mon Sep 17 00:00:00 2001 From: lubitchv Date: Thu, 16 Mar 2023 13:19:33 -0400 Subject: [PATCH 0064/1525] restore back 
--- src/test/java/edu/harvard/iq/dataverse/api/AdminIT.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/test/java/edu/harvard/iq/dataverse/api/AdminIT.java b/src/test/java/edu/harvard/iq/dataverse/api/AdminIT.java index 14185b97e9e..2ba06314ddb 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/AdminIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/AdminIT.java @@ -758,7 +758,7 @@ public void testLoadMetadataBlock_NoErrorPath() { assertEquals(1, data.size()); List> addedElements = data.get("added"); //Note -test depends on the number of elements in the production citation block, so any changes to the # of elements there can break this test - assertEquals(322, addedElements.size()); + assertEquals(323, addedElements.size()); Map statistics = new HashMap<>(); for (Map unit : addedElements) { From ef6aaec2a37ae2ef246db6535a8f44350131ffdc Mon Sep 17 00:00:00 2001 From: "Balazs E. Pataki" Date: Fri, 17 Mar 2023 16:22:48 +0100 Subject: [PATCH 0065/1525] Fix placement of allowedApiCalls in example manifests allowedApiCalls should be at the top level, not inside toolParameters. --- .../external-tools/dynamicDatasetTool.json | 20 +++++++++---------- .../root/external-tools/fabulousFileTool.json | 18 ++++++++--------- 2 files changed, 19 insertions(+), 19 deletions(-) diff --git a/doc/sphinx-guides/source/_static/installation/files/root/external-tools/dynamicDatasetTool.json b/doc/sphinx-guides/source/_static/installation/files/root/external-tools/dynamicDatasetTool.json index 47413c8a625..22dd6477cb4 100644 --- a/doc/sphinx-guides/source/_static/installation/files/root/external-tools/dynamicDatasetTool.json +++ b/doc/sphinx-guides/source/_static/installation/files/root/external-tools/dynamicDatasetTool.json @@ -14,14 +14,14 @@ { "locale":"{localeCode}" } - ], - "allowedApiCalls": [ - { - "name":"retrieveDatasetJson", - "httpMethod":"GET", - "urlTemplate":"/api/v1/datasets/{datasetId}", - "timeOut":10 - } - ] - } + ] + }, + "allowedApiCalls": [ + { + "name":"retrieveDatasetJson", + "httpMethod":"GET", + "urlTemplate":"/api/v1/datasets/{datasetId}", + "timeOut":10 + } + ] } diff --git a/doc/sphinx-guides/source/_static/installation/files/root/external-tools/fabulousFileTool.json b/doc/sphinx-guides/source/_static/installation/files/root/external-tools/fabulousFileTool.json index 1c132576099..2b6a0b8e092 100644 --- a/doc/sphinx-guides/source/_static/installation/files/root/external-tools/fabulousFileTool.json +++ b/doc/sphinx-guides/source/_static/installation/files/root/external-tools/fabulousFileTool.json @@ -21,14 +21,14 @@ { "locale":"{localeCode}" } - ], - "allowedApiCalls": [ - { - "name":"retrieveDataFile", - "httpMethod":"GET", - "urlTemplate":"/api/v1/access/datafile/{fileId}", - "timeOut":270 - } ] - } + }, + "allowedApiCalls": [ + { + "name":"retrieveDataFile", + "httpMethod":"GET", + "urlTemplate":"/api/v1/access/datafile/{fileId}", + "timeOut":270 + } + ] } From 2805d8aa07fbfa75ebaea2fca25942ffadf75639 Mon Sep 17 00:00:00 2001 From: "Balazs E. Pataki" Date: Fri, 17 Mar 2023 16:24:41 +0100 Subject: [PATCH 0066/1525] Add missing break to DATASET case Without this it also evaluates the FILE case causing NPE when dataFile is accessed. 
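A switch case without a terminating break falls through into the next case. A minimal
sketch of the failure mode, with illustrative names rather than the handler's actual
fields:

    // Fall-through demo: DATASET continues into FILE and dereferences a null fileId.
    enum Scope { DATASET, FILE }

    class FallThroughDemo {
        static String callbackFor(Scope scope, Long datasetId, Long fileId) {
            String callback = null;
            switch (scope) {
                case DATASET:
                    callback = "/api/v1/datasets/" + datasetId + "/versions/:latest/toolparams/1";
                    // no break here: execution continues into the FILE case
                case FILE:
                    callback = "/api/v1/files/" + fileId.toString() + "/toolparams/1"; // NPE when fileId is null
                    break;
            }
            return callback;
        }

        public static void main(String[] args) {
            System.out.println(callbackFor(Scope.FILE, null, 7L));     // fine
            System.out.println(callbackFor(Scope.DATASET, 42L, null)); // throws NullPointerException
        }
    }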
--- .../harvard/iq/dataverse/externaltools/ExternalToolHandler.java | 1 + 1 file changed, 1 insertion(+) diff --git a/src/main/java/edu/harvard/iq/dataverse/externaltools/ExternalToolHandler.java b/src/main/java/edu/harvard/iq/dataverse/externaltools/ExternalToolHandler.java index 88a51017b75..dac046373ba 100644 --- a/src/main/java/edu/harvard/iq/dataverse/externaltools/ExternalToolHandler.java +++ b/src/main/java/edu/harvard/iq/dataverse/externaltools/ExternalToolHandler.java @@ -111,6 +111,7 @@ public String handleRequest(boolean preview) { case DATASET: callback=SystemConfig.getDataverseSiteUrlStatic() + "/api/v1/datasets/" + dataset.getId() + "/versions/:latest/toolparams/" + externalTool.getId(); + break; case FILE: callback= SystemConfig.getDataverseSiteUrlStatic() + "/api/v1/files/" + dataFile.getId() + "/metadata/" + fileMetadata.getId() + "/toolparams/" From 10a475317771227b23263170ad3c97232764d14d Mon Sep 17 00:00:00 2001 From: Leonid Andreev Date: Mon, 20 Mar 2023 10:48:20 -0400 Subject: [PATCH 0067/1525] An extra check, to disable the upload component right away, if the quota is already full/exceeded. #9361 --- src/main/java/edu/harvard/iq/dataverse/EditDatafilesPage.java | 4 ++++ src/main/webapp/editFilesFragment.xhtml | 2 +- 2 files changed, 5 insertions(+), 1 deletion(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/EditDatafilesPage.java b/src/main/java/edu/harvard/iq/dataverse/EditDatafilesPage.java index 420642f2fa5..c39e6f62ce2 100644 --- a/src/main/java/edu/harvard/iq/dataverse/EditDatafilesPage.java +++ b/src/main/java/edu/harvard/iq/dataverse/EditDatafilesPage.java @@ -542,6 +542,10 @@ public String initCreateMode(String modeToken, DatasetVersion version, MutableBo saveEnabled = true; return null; } + + public boolean isQuotaExceeded() { + return systemConfig.isStorageQuotasEnforced() && datafileService.getUserStorageQuota((AuthenticatedUser) session.getUser(), dataset).getRemainingQuotaInBytes() == 0; + } public String init() { // default mode should be EDIT diff --git a/src/main/webapp/editFilesFragment.xhtml b/src/main/webapp/editFilesFragment.xhtml index 77f7aab2f76..834ca597892 100644 --- a/src/main/webapp/editFilesFragment.xhtml +++ b/src/main/webapp/editFilesFragment.xhtml @@ -148,7 +148,7 @@ dragDropSupport="true" auto="#{!(systemConfig.directUploadEnabled(EditDatafilesPage.dataset))}" multiple="#{datasetPage || EditDatafilesPage.allowMultipleFileUpload()}" - disabled="#{lockedFromEdits || !(datasetPage || EditDatafilesPage.showFileUploadComponent()) }" + disabled="#{lockedFromEdits || !(datasetPage || EditDatafilesPage.showFileUploadComponent()) || EditDatafilesPage.isQuotaExceeded()}" listener="#{EditDatafilesPage.handleFileUpload}" process="filesTable" update=":datasetForm:filesTable, @([id$=filesButtons])" From 68f9a49447f228efbbde547d28669c66627fb72b Mon Sep 17 00:00:00 2001 From: Jim Myers Date: Mon, 6 Mar 2023 13:30:06 -0500 Subject: [PATCH 0068/1525] use configured fixity algorithm in direct upload --- .../edu/harvard/iq/dataverse/api/Files.java | 6 ++ src/main/webapp/resources/js/fileupload.js | 101 ++++++++++++------ 2 files changed, 73 insertions(+), 34 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Files.java b/src/main/java/edu/harvard/iq/dataverse/api/Files.java index 44c7e944556..7bbdea9df73 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Files.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Files.java @@ -760,4 +760,10 @@ public Response getExternalToolFMParams(@Context ContainerRequestContext crc, @P 
        eth = new ExternalToolHandler(externalTool, target.getDataFile(), apiToken, target, locale);
        return ok(eth.createPostBody(eth.getParams(JsonUtil.getJsonObject(externalTool.getToolParameters()))));
    }
+
+    @GET
+    @Path("fixityAlgorithm")
+    public Response getFixityAlgorithm() {
+        return ok(systemConfig.getFileFixityChecksumAlgorithm().toString());
+    }
 }
diff --git a/src/main/webapp/resources/js/fileupload.js b/src/main/webapp/resources/js/fileupload.js
index a478235c09f..8c0ccbea88d 100644
--- a/src/main/webapp/resources/js/fileupload.js
+++ b/src/main/webapp/resources/js/fileupload.js
@@ -17,6 +17,8 @@ var directUploadEnabled = false;

 var directUploadReport = true;

+var fixityAlgorithm;
+
 //How many files have started being processed but aren't yet being uploaded
 var filesInProgress = 0;
 //The # of the current file being processed (total number of files for which upload has at least started)
@@ -34,53 +36,84 @@ var finishFile = (function() {

 function setupDirectUpload(enabled) {
-    if (enabled) {
-        directUploadEnabled = true;
-        //An indicator as to which version is being used - should keep updated.
-        console.log('Dataverse Direct Upload for v5.0');
-        $('.ui-fileupload-upload').hide();
-        $('.ui-fileupload-cancel').hide();
+    if (enabled) {
+        directUploadEnabled = true;
+        //An indicator as to which version is being used - should keep updated.
+        console.log('Dataverse Direct Upload for v5.0');
+        $('.ui-fileupload-upload').hide();
+        $('.ui-fileupload-cancel').hide();
+
+        fetch("api/files/fixityAlgorithm")
+            .then(async (response) => {
+                var fixityString = "MD5";
+                if (!response.ok) {
+                    console.log("Did not get fixityAlgorithm from Dataverse, using MD5");
+                } else {
+                    //The API's ok() response wraps the value as {"status":"OK","data":{"message":...}}
+                    fixityString = (await response.json()).data.message;
+                }
+                switch (fixityString) {
+                    case 'MD5':
+                        fixityAlgorithm = CryptoJS.algo.MD5;
+                        break;
+                    case 'SHA-1':
+                        fixityAlgorithm = CryptoJS.algo.SHA1;
+                        break;
+                    case 'SHA-256':
+                        fixityAlgorithm = CryptoJS.algo.SHA256;
+                        break;
+                    case 'SHA-512':
+                        fixityAlgorithm = CryptoJS.algo.SHA512;
+                        break;
+                    default:
+                        console.log(`${fixityString} is not supported, using MD5 as the fixity algorithm`);
+                        fixityAlgorithm = CryptoJS.algo.MD5;
+                }
+            })
+            .then(() => {

         //Catch files entered via upload dialog box. Since this 'select' widget is replaced by PF, we need to add a listener again when it is replaced
         var fileInput = document.getElementById('datasetForm:fileUpload_input');
         if (fileInput !== null) {
-            fileInput.addEventListener('change', function(event) {
-                fileList = [];
-                for (var i = 0; i < fileInput.files.length; i++) {
-                    queueFileForDirectUpload(fileInput.files[i]);
-                }
-            }, { once: false });
+            fileInput.addEventListener('change', function(event) {
+                fileList = [];
+                for (var i = 0; i < fileInput.files.length; i++) {
+                    queueFileForDirectUpload(fileInput.files[i]);
+                }
+            }, { once: false });
         }
         //Add support for drag and drop. Since the fileUploadForm is not replaced by PF, catching changes with a mutationobserver isn't needed
         var fileDropWidget = document.getElementById('datasetForm:fileUpload');
         fileDropWidget.addEventListener('drop', function(event) {
-            fileList = [];
-            for (var i = 0; i < event.dataTransfer.files.length; i++) {
-                queueFileForDirectUpload(event.dataTransfer.files[i]);
-            }
+            fileList = [];
+            for (var i = 0; i < event.dataTransfer.files.length; i++) {
+                queueFileForDirectUpload(event.dataTransfer.files[i]);
+            }
         }, { once: false });

         var config = { childList: true };
         var callback = function(mutations) {
-            mutations.forEach(function(mutation) {
-                for (i = 0; i < mutation.addedNodes.length; i++) {
-                    //Add a listener on any replacement file 'select' widget
-                    if (mutation.addedNodes[i].id == 'datasetForm:fileUpload_input') {
-                        fileInput = mutation.addedNodes[i];
-                        mutation.addedNodes[i].addEventListener('change', function(event) {
-                            for (var j = 0; j < mutation.addedNodes[i].files.length; j++) {
-                                queueFileForDirectUpload(mutation.addedNodes[i].files[j]);
-                            }
-                        }, { once: false });
-                    }
-                }
-            });
+            mutations.forEach(function(mutation) {
+                for (i = 0; i < mutation.addedNodes.length; i++) {
+                    //Add a listener on any replacement file 'select' widget
+                    if (mutation.addedNodes[i].id == 'datasetForm:fileUpload_input') {
+                        fileInput = mutation.addedNodes[i];
+                        mutation.addedNodes[i].addEventListener('change', function(event) {
+                            for (var j = 0; j < mutation.addedNodes[i].files.length; j++) {
+                                queueFileForDirectUpload(mutation.addedNodes[i].files[j]);
+                            }
+                        }, { once: false });
+                    }
+                }
+            });
         };
         if (observer2 != null) {
-            observer2.disconnect();
+            observer2.disconnect();
         }
         observer2 = new MutationObserver(callback);
         observer2.observe(document.getElementById('datasetForm:fileUpload'), config);
-    } //else ?
+        });
+    }//else ?
 }

 function sleep(ms) {
@@ -625,9 +658,9 @@ function readChunked(file, chunkCallback, endCallback) {

 function getMD5(blob, cbProgress) {
     return new Promise((resolve, reject) => {
-        var md5 = CryptoJS.algo.MD5.create();
+        var fixity = fixityAlgorithm.create();
         readChunked(blob, (chunk, offs, total) => {
-            md5.update(CryptoJS.enc.Latin1.parse(chunk));
+            fixity.update(CryptoJS.enc.Latin1.parse(chunk));
             if (cbProgress) {
                 cbProgress(offs / total);
             }
         }, (err, _) => {
             if (err) {
                 reject(err);
             } else {
                 // TODO: Handle errors
-                var hash = md5.finalize();
+                var hash = fixity.finalize();
                 var hashHex = hash.toString(CryptoJS.enc.Hex);
                 resolve(hashHex);
             }

From 37e969f0296c207aa373cb960bd4a3c167d007c7 Mon Sep 17 00:00:00 2001
From: Jim Myers
Date: Tue, 7 Mar 2023 15:14:27 -0500
Subject: [PATCH 0069/1525] support all checksum algs in direct upload

update crypto-js to 4.0.0
use local copies of crypto-js files
only load js when needed
add api call to get fixity/checksum alg
---
 src/main/webapp/editFilesFragment.xhtml      |   21 +-
 .../webapp/provenance-popups-fragment.xhtml  |    5 +-
 .../resources/js/crypto-js/4.0.0/core.js     |  797 ++++++++++
 .../resources/js/crypto-js/4.0.0/md5.js      |  268 ++++
 .../resources/js/crypto-js/4.0.0/sha1.js     |  150 ++
 .../resources/js/crypto-js/4.0.0/sha256.js   |  199 +++
 .../resources/js/crypto-js/4.0.0/sha512.js   |  326 ++++
 .../resources/js/crypto-js/4.0.0/x64-core.js |  304 ++++
 src/main/webapp/resources/js/fileupload.js   | 1368 +++++++++--------
 9 files changed, 2754 insertions(+), 684 deletions(-)
 create mode 100644 src/main/webapp/resources/js/crypto-js/4.0.0/core.js
 create mode 100644 src/main/webapp/resources/js/crypto-js/4.0.0/md5.js
 create mode 100644 src/main/webapp/resources/js/crypto-js/4.0.0/sha1.js
 create mode 100644 src/main/webapp/resources/js/crypto-js/4.0.0/sha256.js
 create mode 100644 src/main/webapp/resources/js/crypto-js/4.0.0/sha512.js
 create mode 100644 src/main/webapp/resources/js/crypto-js/4.0.0/x64-core.js

diff --git a/src/main/webapp/editFilesFragment.xhtml b/src/main/webapp/editFilesFragment.xhtml
index a4e635b8c14..1a049331ae4 100644
--- a/src/main/webapp/editFilesFragment.xhtml
+++ b/src/main/webapp/editFilesFragment.xhtml
@@ -11,9 +11,20 @@
         xmlns:o="http://omnifaces.org/ui"
         xmlns:iqbs="http://xmlns.jcp.org/jsf/composite/iqbs">
-
-
-
+
+
+
+
+
+
+
+
+
+
+
+
+
+
      @@ -109,7 +120,7 @@ $(document).ready(function () { uploadWidgetDropMsg(); - setupDirectUpload(#{systemConfig.directUploadEnabled(EditDatafilesPage.dataset)}); + #{useDirectUpload ? 'setupDirectUpload(true);':''} }); //]]> @@ -584,7 +595,7 @@

      #{EditDatafilesPage.warningMessageForFileTypeDifferentPopUp}

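In direct upload, the browser computes the file checksum with crypto-js and registers
it with Dataverse, so the algorithm the client uses has to be the one the server is
configured to verify with. A minimal sketch of the matching server-side computation
using java.security.MessageDigest; the class and method names are illustrative, not
Dataverse's actual verification code:

    import java.io.InputStream;
    import java.nio.file.Files;
    import java.nio.file.Path;
    import java.security.MessageDigest;

    class FixityCheckDemo {
        // Recompute a stored file's checksum with the configured algorithm
        // ("MD5", "SHA-1", "SHA-256", or "SHA-512") and compare it to the hex
        // digest the client reported when it registered the direct upload.
        static boolean matches(Path file, String algorithm, String expectedHex) throws Exception {
            MessageDigest digest = MessageDigest.getInstance(algorithm);
            try (InputStream in = Files.newInputStream(file)) {
                byte[] buffer = new byte[8192];
                int read;
                while ((read = in.read(buffer)) != -1) {
                    digest.update(buffer, 0, read); // chunked, like the progressive hashing in fileupload.js
                }
            }
            StringBuilder hex = new StringBuilder();
            for (byte b : digest.digest()) {
                hex.append(String.format("%02x", b));
            }
            return hex.toString().equalsIgnoreCase(expectedHex);
        }
    }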
    "); - Long objectId = null; userNotificationService.sendNotification( authenticatedUser, Timestamp.from(Instant.now()), !ingestWithErrors ? UserNotification.Type.INGESTCOMPLETED : UserNotification.Type.INGESTCOMPLETEDWITHERRORS, - objectId, + ingestMessage.getDatasetId(), sbIngestedFiles.toString(), true ); From 6ca073e232b25b7deb902dd03625a916b9c6596a Mon Sep 17 00:00:00 2001 From: Eryk Kulikowski Date: Fri, 5 May 2023 13:06:06 +0200 Subject: [PATCH 0128/1525] added a realease note --- doc/release-notes/9558-async-indexing.md | 3 +++ 1 file changed, 3 insertions(+) create mode 100644 doc/release-notes/9558-async-indexing.md diff --git a/doc/release-notes/9558-async-indexing.md b/doc/release-notes/9558-async-indexing.md new file mode 100644 index 00000000000..a44eac1ff75 --- /dev/null +++ b/doc/release-notes/9558-async-indexing.md @@ -0,0 +1,3 @@ +Performance improvements, especially for large datasets containing thousands of files. +Uploading files one by one to the dataset is much faster now, allowing uploading thousands of files in an acceptable timeframe. Not only uploading a file, but all edit operations on datasets containing many files, got faster. +Performance tweaks include indexing of the datasets in the background and optimizations in the amount of the indexing operations needed. Furthermore, updates to the dateset no longer wait for ingesting to finish. Ingesting was already running in the background, but it took a lock, preventing updating the dataset and degrading performance for datasets containing many files. \ No newline at end of file From 2c4c927cc8f20d53ee1aaaf1979b793ee53f9b3f Mon Sep 17 00:00:00 2001 From: qqmyers Date: Fri, 5 May 2023 14:13:02 -0400 Subject: [PATCH 0129/1525] add token --- .../dataaccess/GlobusOverlayAccessIO.java | 171 +++++++++++------- .../iq/dataverse/settings/JvmSettings.java | 1 + 2 files changed, 111 insertions(+), 61 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/dataaccess/GlobusOverlayAccessIO.java b/src/main/java/edu/harvard/iq/dataverse/dataaccess/GlobusOverlayAccessIO.java index fe62e25ad6f..050b9ddc176 100644 --- a/src/main/java/edu/harvard/iq/dataverse/dataaccess/GlobusOverlayAccessIO.java +++ b/src/main/java/edu/harvard/iq/dataverse/dataaccess/GlobusOverlayAccessIO.java @@ -7,6 +7,7 @@ import edu.harvard.iq.dataverse.datavariable.DataVariable; import edu.harvard.iq.dataverse.settings.JvmSettings; import edu.harvard.iq.dataverse.util.UrlSignerUtil; +import edu.harvard.iq.dataverse.util.json.JsonUtil; import java.io.FileNotFoundException; import java.io.IOException; @@ -31,6 +32,7 @@ import java.util.logging.Logger; import org.apache.commons.lang3.NotImplementedException; +import org.apache.http.client.ClientProtocolException; import org.apache.http.client.config.CookieSpecs; import org.apache.http.client.config.RequestConfig; import org.apache.http.client.methods.CloseableHttpResponse; @@ -49,6 +51,7 @@ import org.apache.http.ssl.SSLContextBuilder; import org.apache.http.util.EntityUtils; +import javax.json.JsonObject; import javax.net.ssl.SSLContext; /** @@ -58,8 +61,8 @@ /* * Globus Overlay Driver * - * StorageIdentifier format: - * :///// + * StorageIdentifier format: :///// */ public class GlobusOverlayAccessIO extends StorageIO { @@ -68,6 +71,7 @@ public class GlobusOverlayAccessIO extends StorageIO { private StorageIO baseStore = null; private String path = null; private String endpointWithBasePath = null; + private String globusToken = null; private static HttpClientContext localContext = 
HttpClientContext.create(); private PoolingHttpClientConnectionManager cm = null; @@ -86,7 +90,7 @@ public GlobusOverlayAccessIO(T dvObject, DataAccessRequest req, String driverId) logger.fine("Parsing storageidentifier: " + dvObject.getStorageIdentifier()); path = dvObject.getStorageIdentifier().substring(dvObject.getStorageIdentifier().lastIndexOf("//") + 2); validatePath(path); - + logger.fine("Relative path: " + path); } @@ -99,18 +103,17 @@ public GlobusOverlayAccessIO(String storageLocation, String driverId) throws IOE validatePath(path); logger.fine("Relative path: " + path); } - + private void validatePath(String relPath) throws IOException { try { URI absoluteURI = new URI(endpointWithBasePath + "/" + relPath); - if(!absoluteURI.normalize().toString().startsWith(endpointWithBasePath)) { + if (!absoluteURI.normalize().toString().startsWith(endpointWithBasePath)) { throw new IOException("storageidentifier doesn't start with " + this.driverId + "'s endpoint/basePath"); } - } catch(URISyntaxException use) { + } catch (URISyntaxException use) { throw new IOException("Could not interpret storageidentifier in remote store " + this.driverId); } - } - + } @Override public void open(DataAccessOption... options) throws IOException { @@ -181,37 +184,64 @@ public void open(DataAccessOption... options) throws IOException { } } + // Call the Globus API to get the file size private long getSizeFromGlobus() { - throw new NotImplementedException(); - /* - long size = -1; - HttpHead head = new HttpHead(endpointWithBasePath + "/" + path); + // Construct Globus URL + URI absoluteURI = null; try { - CloseableHttpResponse response = getSharedHttpClient().execute(head, localContext); - - try { - int code = response.getStatusLine().getStatusCode(); - logger.fine("Response for HEAD: " + code); - switch (code) { - case 200: - Header[] headers = response.getHeaders(HTTP.CONTENT_LEN); - logger.fine("Num headers: " + headers.length); - String sizeString = response.getHeaders(HTTP.CONTENT_LEN)[0].getValue(); - logger.fine("Content-Length: " + sizeString); - size = Long.parseLong(response.getHeaders(HTTP.CONTENT_LEN)[0].getValue()); - logger.fine("Found file size: " + size); - break; - default: - logger.warning("Response from " + head.getURI().toString() + " was " + code); - } - } finally { - EntityUtils.consume(response.getEntity()); + int filenameStart = path.lastIndexOf("/") + 1; + int pathStart = endpointWithBasePath.indexOf("/") + 1; + + String directoryPath = (pathStart > 0 ? endpointWithBasePath.substring(pathStart) : "") + + path.substring(0, filenameStart); + String filename = path.substring(filenameStart); + String endpoint = pathStart > 0 ? 
endpointWithBasePath.substring(0, pathStart - 1) : endpointWithBasePath; + + absoluteURI = new URI("https://transfer.api.globusonline.org/v0.10/operation/endpoint/" + endpoint + "/ls?path=" + path + "&filter=name:" + filename); + HttpGet get = new HttpGet(absoluteURI); + String token = JvmSettings.GLOBUS_TOKEN.lookup(driverId); + logger.info("Token is " + token); + get.addHeader("Authorization", "Bearer " + token); + CloseableHttpResponse response = getSharedHttpClient().execute(get, localContext); + if (response.getStatusLine().getStatusCode() == 200) { + //Get reponse as string + String responseString = EntityUtils.toString(response.getEntity()); + logger.fine("Response from " + get.getURI().toString() + " is: " + responseString); + JsonObject responseJson = JsonUtil.getJsonObject(responseString); + return (long) responseJson.getInt("size"); + } else { + logger.warning("Response from " + get.getURI().toString() + " was " + response.getStatusLine().getStatusCode()); + logger.info(EntityUtils.toString(response.getEntity())); } + } catch (URISyntaxException e) { + // Should have been caught in validatePath + e.printStackTrace(); + } catch (ClientProtocolException e) { + // TODO Auto-generated catch block + e.printStackTrace(); } catch (IOException e) { - logger.warning(e.getMessage()); + // TODO Auto-generated catch block + e.printStackTrace(); } - return size; - */ + return -1; + + /* + * long size = -1; HttpHead head = new HttpHead(endpointWithBasePath + "/" + + * path); try { CloseableHttpResponse response = + * getSharedHttpClient().execute(head, localContext); + * + * try { int code = response.getStatusLine().getStatusCode(); + * logger.fine("Response for HEAD: " + code); switch (code) { case 200: Header[] + * headers = response.getHeaders(HTTP.CONTENT_LEN); logger.fine("Num headers: " + * + headers.length); String sizeString = + * response.getHeaders(HTTP.CONTENT_LEN)[0].getValue(); + * logger.fine("Content-Length: " + sizeString); size = + * Long.parseLong(response.getHeaders(HTTP.CONTENT_LEN)[0].getValue()); + * logger.fine("Found file size: " + size); break; default: + * logger.warning("Response from " + head.getURI().toString() + " was " + code); + * } } finally { EntityUtils.consume(response.getEntity()); } } catch + * (IOException e) { logger.warning(e.getMessage()); } return size; + */ } @Override @@ -360,8 +390,9 @@ public String getStorageLocation() throws IOException { String fullStorageLocation = dvObject.getStorageIdentifier(); logger.fine("storageidentifier: " + fullStorageLocation); int driverIndex = fullStorageLocation.lastIndexOf(DataAccess.SEPARATOR); - if(driverIndex >=0) { - fullStorageLocation = fullStorageLocation.substring(fullStorageLocation.lastIndexOf(DataAccess.SEPARATOR) + DataAccess.SEPARATOR.length()); + if (driverIndex >= 0) { + fullStorageLocation = fullStorageLocation + .substring(fullStorageLocation.lastIndexOf(DataAccess.SEPARATOR) + DataAccess.SEPARATOR.length()); } if (this.getDvObject() instanceof Dataset) { throw new IOException("RemoteOverlayAccessIO: Datasets are not a supported dvObject"); @@ -411,7 +442,7 @@ public boolean downloadRedirectEnabled() { } return false; } - + public boolean downloadRedirectEnabled(String auxObjectTag) { return baseStore.downloadRedirectEnabled(auxObjectTag); } @@ -469,9 +500,10 @@ private void configureStores(DataAccessRequest req, String driverId, String stor if (baseStore == null) { String baseDriverId = getBaseStoreIdFor(driverId); String fullStorageLocation = null; - String baseDriverType = 
System.getProperty("dataverse.files." + baseDriverId + ".type", DataAccess.DEFAULT_STORAGE_DRIVER_IDENTIFIER); - - if(dvObject instanceof Dataset) { + String baseDriverType = System.getProperty("dataverse.files." + baseDriverId + ".type", + DataAccess.DEFAULT_STORAGE_DRIVER_IDENTIFIER); + + if (dvObject instanceof Dataset) { baseStore = DataAccess.getStorageIO(dvObject, req, baseDriverId); } else { if (this.getDvObject() != null) { @@ -486,8 +518,8 @@ private void configureStores(DataAccessRequest req, String driverId, String stor break; case DataAccess.FILE: fullStorageLocation = baseDriverId + DataAccess.SEPARATOR - + System.getProperty("dataverse.files." + baseDriverId + ".directory", "/tmp/files") + "/" - + fullStorageLocation; + + System.getProperty("dataverse.files." + baseDriverId + ".directory", "/tmp/files") + + "/" + fullStorageLocation; break; default: logger.warning("Not Implemented: RemoteOverlay store with base store type: " @@ -497,12 +529,12 @@ private void configureStores(DataAccessRequest req, String driverId, String stor } else if (storageLocation != null) { // ://// - //remoteDriverId:// is removed if coming through directStorageIO + // remoteDriverId:// is removed if coming through directStorageIO int index = storageLocation.indexOf(DataAccess.SEPARATOR); - if(index > 0) { + if (index > 0) { storageLocation = storageLocation.substring(index + DataAccess.SEPARATOR.length()); } - //THe base store needs the baseStoreIdentifier and not the relative URL + // THe base store needs the baseStoreIdentifier and not the relative URL fullStorageLocation = storageLocation.substring(0, storageLocation.indexOf("//")); switch (baseDriverType) { @@ -513,8 +545,8 @@ private void configureStores(DataAccessRequest req, String driverId, String stor break; case DataAccess.FILE: fullStorageLocation = baseDriverId + DataAccess.SEPARATOR - + System.getProperty("dataverse.files." + baseDriverId + ".directory", "/tmp/files") + "/" - + fullStorageLocation; + + System.getProperty("dataverse.files." + baseDriverId + ".directory", "/tmp/files") + + "/" + fullStorageLocation; break; default: logger.warning("Not Implemented: RemoteOverlay store with base store type: " @@ -530,37 +562,41 @@ private void configureStores(DataAccessRequest req, String driverId, String stor } remoteStoreName = System.getProperty("dataverse.files." + this.driverId + ".remote-store-name"); try { - remoteStoreUrl = new URL(System.getProperty("dataverse.files." + this.driverId + ".remote-store-url")); - } catch(MalformedURLException mfue) { + remoteStoreUrl = new URL(System.getProperty("dataverse.files." + this.driverId + ".remote-store-url")); + } catch (MalformedURLException mfue) { logger.fine("Unable to read remoteStoreUrl for driver: " + this.driverId); } } - //Convenience method to assemble the path, starting with the DOI authority/identifier/, that is needed to create a base store via DataAccess.getDirectStorageIO - the caller has to add the store type specific prefix required. + // Convenience method to assemble the path, starting with the DOI + // authority/identifier/, that is needed to create a base store via + // DataAccess.getDirectStorageIO - the caller has to add the store type specific + // prefix required. 
private String getStoragePath() throws IOException { String fullStoragePath = dvObject.getStorageIdentifier(); logger.fine("storageidentifier: " + fullStoragePath); int driverIndex = fullStoragePath.lastIndexOf(DataAccess.SEPARATOR); - if(driverIndex >=0) { - fullStoragePath = fullStoragePath.substring(fullStoragePath.lastIndexOf(DataAccess.SEPARATOR) + DataAccess.SEPARATOR.length()); + if (driverIndex >= 0) { + fullStoragePath = fullStoragePath + .substring(fullStoragePath.lastIndexOf(DataAccess.SEPARATOR) + DataAccess.SEPARATOR.length()); } int suffixIndex = fullStoragePath.indexOf("//"); - if(suffixIndex >=0) { - fullStoragePath = fullStoragePath.substring(0, suffixIndex); + if (suffixIndex >= 0) { + fullStoragePath = fullStoragePath.substring(0, suffixIndex); } if (this.getDvObject() instanceof Dataset) { fullStoragePath = this.getDataset().getAuthorityForFileStorage() + "/" + this.getDataset().getIdentifierForFileStorage() + "/" + fullStoragePath; } else if (this.getDvObject() instanceof DataFile) { fullStoragePath = this.getDataFile().getOwner().getAuthorityForFileStorage() + "/" - + this.getDataFile().getOwner().getIdentifierForFileStorage() + "/" + fullStoragePath; - }else if (dvObject instanceof Dataverse) { + + this.getDataFile().getOwner().getIdentifierForFileStorage() + "/" + fullStoragePath; + } else if (dvObject instanceof Dataverse) { throw new IOException("RemoteOverlayAccessIO: Dataverses are not a supported dvObject"); } logger.fine("fullStoragePath: " + fullStoragePath); return fullStoragePath; } - + public CloseableHttpClient getSharedHttpClient() { if (httpclient == null) { try { @@ -622,11 +658,11 @@ protected static boolean isValidIdentifier(String driverId, String storageId) { String baseUri = System.getProperty("dataverse.files." 
+ driverId + ".base-uri"); try { URI absoluteURI = new URI(baseUri + "/" + urlPath); - if(!absoluteURI.normalize().toString().startsWith(baseUri)) { + if (!absoluteURI.normalize().toString().startsWith(baseUri)) { logger.warning("storageidentifier doesn't start with " + driverId + "'s base-url: " + storageId); return false; } - } catch(URISyntaxException use) { + } catch (URISyntaxException use) { logger.warning("Could not interpret storageidentifier in remote store " + driverId + " : " + storageId); logger.warning(use.getLocalizedMessage()); return false; @@ -642,14 +678,27 @@ public static String getBaseStoreIdFor(String driverId) { public List cleanUp(Predicate filter, boolean dryRun) throws IOException { return baseStore.cleanUp(filter, dryRun); } - + public static void main(String[] args) { System.out.println("Running the main method"); if (args.length > 0) { System.out.printf("List of arguments: {}", Arrays.toString(args)); } - System.setProperty("dataverse.files.globus.base-uri", "12345/top"); + System.setProperty("dataverse.files.globus.base-uri", "2791b83e-b989-47c5-a7fa-ce65fd949522"); System.out.println("Valid: " + isValidIdentifier("globus", "globus://localid//../of/the/hill")); + System.setProperty("dataverse.files.globus.globus-token","Mjc5MWI4M2UtYjk4OS00N2M1LWE3ZmEtY2U2NWZkOTQ5NTIyOlprRmxGejNTWDlkTVpUNk92ZmVJaFQyTWY0SDd4cXBoTDNSS29vUmRGVlE9"); + System.setProperty("dataverse.files.globus.base-store","file"); + System.setProperty("dataverse.files.file.type", + DataAccess.DEFAULT_STORAGE_DRIVER_IDENTIFIER); + System.setProperty("dataverse.files.file.directory", "/tmp/files"); logger.info(JvmSettings.BASE_URI.lookup("globus")); + try { + GlobusOverlayAccessIO gsio = new GlobusOverlayAccessIO("globus://1234//2791b83e-b989-47c5-a7fa-ce65fd949522/hdc1/image001.mrc", "globus"); + logger.info("Size is " + gsio.getSizeFromGlobus()); + + } catch (IOException e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } } } diff --git a/src/main/java/edu/harvard/iq/dataverse/settings/JvmSettings.java b/src/main/java/edu/harvard/iq/dataverse/settings/JvmSettings.java index 4fb895f5adc..eac8411c939 100644 --- a/src/main/java/edu/harvard/iq/dataverse/settings/JvmSettings.java +++ b/src/main/java/edu/harvard/iq/dataverse/settings/JvmSettings.java @@ -49,6 +49,7 @@ public enum JvmSettings { FILES_DIRECTORY(SCOPE_FILES, "directory"), FILES(SCOPE_FILES), BASE_URI(FILES, "base-uri"), + GLOBUS_TOKEN(FILES, "globus-token"), // SOLR INDEX SETTINGS SCOPE_SOLR(PREFIX, "solr"), From cef601ee6af94fcfa56d9d02efb807d2c6bb20d2 Mon Sep 17 00:00:00 2001 From: Leonid Andreev Date: Tue, 9 May 2023 13:33:17 -0400 Subject: [PATCH 0130/1525] A new, experimental version of the Add Files command with the dynamic permissions lookup that allows it to be checked on either the Dataset or the parent Collection (when files are bing added in the context of creating a new dataset via the Add Dataset page). 
From cef601ee6af94fcfa56d9d02efb807d2c6bb20d2 Mon Sep 17 00:00:00 2001
From: Leonid Andreev
Date: Tue, 9 May 2023 13:33:17 -0400
Subject: [PATCH 0130/1525] A new, experimental version of the Add Files
 command with the dynamic permissions lookup that allows it to be checked on
 either the Dataset or the parent Collection (when files are being added in
 the context of creating a new dataset via the Add Dataset page). #9361

---
 .../impl/CreateNewDataFilesCommand.java | 49 ++++++++++++++++++-
 1 file changed, 47 insertions(+), 2 deletions(-)

diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateNewDataFilesCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateNewDataFilesCommand.java
index 365a260cfd2..fb4f7a7db86 100644
--- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateNewDataFilesCommand.java
+++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateNewDataFilesCommand.java
@@ -9,11 +9,12 @@
 import edu.harvard.iq.dataverse.engine.command.AbstractCommand;
 import edu.harvard.iq.dataverse.engine.command.CommandContext;
 import edu.harvard.iq.dataverse.engine.command.DataverseRequest;
-import edu.harvard.iq.dataverse.engine.command.RequiredPermissions;
+//import edu.harvard.iq.dataverse.engine.command.RequiredPermissions;
 import edu.harvard.iq.dataverse.engine.command.exception.CommandException;
 import edu.harvard.iq.dataverse.engine.command.exception.CommandExecutionException;
 import edu.harvard.iq.dataverse.ingest.IngestServiceShapefileHelper;
 import edu.harvard.iq.dataverse.DataFileServiceBean.UserStorageQuota;
+import edu.harvard.iq.dataverse.Dataverse;
 import edu.harvard.iq.dataverse.util.file.FileExceedsStorageQuotaException;
 import edu.harvard.iq.dataverse.util.BundleUtil;
 import edu.harvard.iq.dataverse.util.FileUtil;
@@ -42,8 +43,12 @@
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Enumeration;
+import java.util.HashMap;
+import java.util.HashSet;
 import java.util.List;
+import java.util.Map;
 import java.util.Optional;
+import java.util.Set;
 import java.util.logging.Logger;
 import java.util.zip.GZIPInputStream;
 import java.util.zip.ZipFile;
@@ -57,7 +62,10 @@
  *
  * @author landreev
  */
-@RequiredPermissions( Permission.EditDataset )
+// Note the commented out @RequiredPermissions. We need to use dynamic
+// permissions instead, to accommodate both adding files to an existing
+// dataset and files being uploaded on create of a new dataset.
+//@RequiredPermissions( Permission.EditDataset )
 public class CreateNewDataFilesCommand extends AbstractCommand<CreateDataFileResult> {
     private static final Logger logger = Logger.getLogger(CreateNewDataFilesCommand.class.getCanonicalName());
 
@@ -68,6 +76,7 @@ public class CreateNewDataFilesCommand extends AbstractCommand<CreateDataFileResult> {
 
+    @Override
+    public Map<String, Set<Permission>> getRequiredPermissions() {
+        Map<String, Set<Permission>> ret = new HashMap<>();
+
+        ret.put("", new HashSet<>());
+
+        if (dataverse != null) {
+            // The command is called in the context of uploading files on
+            // create of a new dataset
+            ret.get("").add(Permission.AddDataset);
+        } else {
+            // An existing dataset
+            ret.get("").add(Permission.EditDataset);
+        }
+
+        return ret;
+    }
 }
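The dynamic-permissions mechanism above replaces the static `@RequiredPermissions` annotation with a permission map computed per invocation. The following sketch distills that idea outside of Dataverse; the `Permission` enum and the stand-in for the parent collection are simplifications for illustration only:

```java
// Illustrative sketch of runtime-computed required permissions.
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;

public class DynamicPermissionsSketch {
    enum Permission { AddDataset, EditDataset }

    // Stand-in for the command's state: non-null when uploading on create.
    private final Object parentCollection;

    DynamicPermissionsSketch(Object parentCollection) {
        this.parentCollection = parentCollection;
    }

    public Map<String, Set<Permission>> getRequiredPermissions() {
        Map<String, Set<Permission>> ret = new HashMap<>();
        ret.put("", new HashSet<>());
        if (parentCollection != null) {
            // Upload while creating a new dataset: the check runs against
            // the parent collection, so AddDataset is required there.
            ret.get("").add(Permission.AddDataset);
        } else {
            // Adding files to an existing dataset.
            ret.get("").add(Permission.EditDataset);
        }
        return ret;
    }

    public static void main(String[] args) {
        System.out.println(new DynamicPermissionsSketch(new Object()).getRequiredPermissions());
        System.out.println(new DynamicPermissionsSketch(null).getRequiredPermissions());
    }
}
```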
From 3c3378f5a3bf39eff13a582d0dc52a2a5549af8f Mon Sep 17 00:00:00 2001
From: qqmyers
Date: Tue, 9 May 2023 14:53:25 -0400
Subject: [PATCH 0131/1525] start refactoring Globus bean

---
 .../dataaccess/GlobusOverlayAccessIO.java   | 28 +++++++++-----
 .../iq/dataverse/globus/AccessToken.java    |  2 +-
 .../dataverse/globus/GlobusServiceBean.java | 37 +++++++++++--------
 3 files changed, 41 insertions(+), 26 deletions(-)

diff --git a/src/main/java/edu/harvard/iq/dataverse/dataaccess/GlobusOverlayAccessIO.java b/src/main/java/edu/harvard/iq/dataverse/dataaccess/GlobusOverlayAccessIO.java
index 050b9ddc176..0d7c5458e14 100644
--- a/src/main/java/edu/harvard/iq/dataverse/dataaccess/GlobusOverlayAccessIO.java
+++ b/src/main/java/edu/harvard/iq/dataverse/dataaccess/GlobusOverlayAccessIO.java
@@ -5,6 +5,8 @@
 import edu.harvard.iq.dataverse.Dataverse;
 import edu.harvard.iq.dataverse.DvObject;
 import edu.harvard.iq.dataverse.datavariable.DataVariable;
+import edu.harvard.iq.dataverse.globus.AccessToken;
+import edu.harvard.iq.dataverse.globus.GlobusServiceBean;
 import edu.harvard.iq.dataverse.settings.JvmSettings;
 import edu.harvard.iq.dataverse.util.UrlSignerUtil;
 import edu.harvard.iq.dataverse.util.json.JsonUtil;
@@ -28,10 +30,8 @@
 import java.util.Arrays;
 import java.util.List;
 import java.util.function.Predicate;
-import java.util.logging.Level;
 import java.util.logging.Logger;
 
-import org.apache.commons.lang3.NotImplementedException;
 import org.apache.http.client.ClientProtocolException;
 import org.apache.http.client.config.CookieSpecs;
 import org.apache.http.client.config.RequestConfig;
@@ -83,6 +83,8 @@ public class GlobusOverlayAccessIO<T extends DvObject> extends StorageIO<T> {
     private static boolean trustCerts = false;
     private int httpConcurrency = 4;
 
+    private String globusAccessToken = null;
+
     public GlobusOverlayAccessIO(T dvObject, DataAccessRequest req, String driverId) throws IOException {
         super(dvObject, req, driverId);
         this.setIsLocalFile(false);
@@ -190,18 +192,19 @@ private long getSizeFromGlobus() {
         URI absoluteURI = null;
         try {
             int filenameStart = path.lastIndexOf("/") + 1;
-            int pathStart = endpointWithBasePath.indexOf("/") + 1;
-
-            String directoryPath = (pathStart > 0 ? endpointWithBasePath.substring(pathStart) : "")
+            int pathStart = endpointWithBasePath.indexOf("/");
+logger.info("endpointWithBasePath: " + endpointWithBasePath);
+            String directoryPath = "/" + (pathStart > 0 ? endpointWithBasePath.substring(pathStart) : "")
                     + path.substring(0, filenameStart);
+            logger.info("directoryPath: " + directoryPath);
             String filename = path.substring(filenameStart);
             String endpoint = pathStart > 0 ? endpointWithBasePath.substring(0, pathStart - 1) : endpointWithBasePath;
 
-            absoluteURI = new URI("https://transfer.api.globusonline.org/v0.10/operation/endpoint/" + endpoint + "/ls?path=" + path + "&filter=name:" + filename);
+            absoluteURI = new URI("https://transfer.api.globusonline.org/v0.10/operation/endpoint/" + endpoint + "/ls?path=" + directoryPath + "&filter=name:" + filename);
             HttpGet get = new HttpGet(absoluteURI);
-            String token = JvmSettings.GLOBUS_TOKEN.lookup(driverId);
-            logger.info("Token is " + token);
-            get.addHeader("Authorization", "Bearer " + token);
+
+            logger.info("Token is " + globusAccessToken);
+            get.addHeader("Authorization", "Bearer " + globusAccessToken);
             CloseableHttpResponse response = getSharedHttpClient().execute(get, localContext);
             if (response.getStatusLine().getStatusCode() == 200) {
                 // Get response as string
@@ -482,6 +485,8 @@ int getUrlExpirationMinutes() {
     }
 
     private void configureStores(DataAccessRequest req, String driverId, String storageLocation) throws IOException {
+        AccessToken accessToken = GlobusServiceBean.getClientToken(JvmSettings.GLOBUS_TOKEN.lookup(driverId));
+        globusAccessToken = accessToken.getOtherTokens().get(0).getAccessToken();
         endpointWithBasePath = JvmSettings.BASE_URI.lookup(this.driverId);
         logger.info("base-uri is " + endpointWithBasePath);
         if (endpointWithBasePath == null) {
@@ -692,8 +697,11 @@ public static void main(String[] args) {
             DataAccess.DEFAULT_STORAGE_DRIVER_IDENTIFIER);
         System.setProperty("dataverse.files.file.directory", "/tmp/files");
         logger.info(JvmSettings.BASE_URI.lookup("globus"));
+
+
+
         try {
-            GlobusOverlayAccessIO gsio = new GlobusOverlayAccessIO("globus://1234//2791b83e-b989-47c5-a7fa-ce65fd949522/hdc1/image001.mrc", "globus");
+            GlobusOverlayAccessIO gsio = new GlobusOverlayAccessIO("globus://1234///hdc1/image001.mrc", "globus");
             logger.info("Size is " + gsio.getSizeFromGlobus());
 
         } catch (IOException e) {
diff --git a/src/main/java/edu/harvard/iq/dataverse/globus/AccessToken.java b/src/main/java/edu/harvard/iq/dataverse/globus/AccessToken.java
index 877fc68e4a1..c93e2c6aa94 100644
--- a/src/main/java/edu/harvard/iq/dataverse/globus/AccessToken.java
+++ b/src/main/java/edu/harvard/iq/dataverse/globus/AccessToken.java
@@ -46,7 +46,7 @@ String getRefreshToken() {
         return refreshToken;
     }
 
-    ArrayList getOtherTokens() {
+    public ArrayList getOtherTokens() {
         return otherTokens;
     }
 
diff --git a/src/main/java/edu/harvard/iq/dataverse/globus/GlobusServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/globus/GlobusServiceBean.java
index 9d80c5cc280..c2137dd1f47 100644
--- a/src/main/java/edu/harvard/iq/dataverse/globus/GlobusServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/globus/GlobusServiceBean.java
@@ -167,7 +167,8 @@ public void updatePermision(AccessToken clientTokenUser, String directory, Strin
 
     public void deletePermision(String ruleId, Logger globusLogger) throws MalformedURLException {
         if (ruleId.length() > 0) {
-            AccessToken clientTokenUser = getClientToken();
+            AccessToken clientTokenUser = getClientToken(settingsSvc.getValueForKey(SettingsServiceBean.Key.GlobusBasicToken, ""));
+
             globusLogger.info("Start deleting permissions.");
             String globusEndpoint = settingsSvc.getValueForKey(SettingsServiceBean.Key.GlobusEndpoint, "");
 
@@ -264,15 +265,21 @@ public GlobusTask getTask(AccessToken clientTokenUser, String taskId, Logger glo
         return task;
     }
 
-    public AccessToken getClientToken() throws MalformedURLException {
-        String globusBasicToken = settingsSvc.getValueForKey(SettingsServiceBean.Key.GlobusBasicToken, "");
-        URL url = new URL(
-                "https://auth.globus.org/v2/oauth2/token?scope=openid+email+profile+urn:globus:auth:scope:transfer.api.globus.org:all&grant_type=client_credentials");
-
-        MakeRequestResponse result = makeRequest(url, "Basic", globusBasicToken, "POST", null);
+    public static AccessToken getClientToken(String globusBasicToken) {
+        URL url;
         AccessToken clientTokenUser = null;
-        if (result.status == 200) {
-            clientTokenUser = parseJson(result.jsonResponse, AccessToken.class, true);
+
+        try {
+            url = new URL(
+                    "https://auth.globus.org/v2/oauth2/token?scope=openid+email+profile+urn:globus:auth:scope:transfer.api.globus.org:all&grant_type=client_credentials");
+
+            MakeRequestResponse result = makeRequest(url, "Basic", globusBasicToken, "POST", null);
+            if (result.status == 200) {
+                clientTokenUser = parseJson(result.jsonResponse, AccessToken.class, true);
+            }
+        } catch (MalformedURLException e) {
+            // On a statically defined URL...
+            e.printStackTrace();
         }
         return clientTokenUser;
     }
@@ -306,7 +313,7 @@ public AccessToken getAccessToken(HttpServletRequest origRequest, String globusB
 
     }
 
-    public MakeRequestResponse makeRequest(URL url, String authType, String authCode, String method,
+    public static MakeRequestResponse makeRequest(URL url, String authType, String authCode, String method,
             String jsonString) {
         String str = null;
         HttpURLConnection connection = null;
@@ -359,7 +366,7 @@ public MakeRequestResponse makeRequest(URL url, String authType, String authCode
 
     }
 
-    private StringBuilder readResultJson(InputStream in) {
+    private static StringBuilder readResultJson(InputStream in) {
         StringBuilder sb = null;
         try {
 
@@ -378,7 +385,7 @@ private StringBuilder readResultJson(InputStream in) {
         return sb;
     }
 
-    private <T> T parseJson(String sb, Class<T> jsonParserClass, boolean namingPolicy) {
+    private static <T> T parseJson(String sb, Class<T> jsonParserClass, boolean namingPolicy) {
         if (sb != null) {
             Gson gson = null;
             if (namingPolicy) {
@@ -420,7 +427,7 @@ public String getDirectory(String datasetId) {
 
     }
 
-    class MakeRequestResponse {
+    static class MakeRequestResponse {
         public String jsonResponse;
         public int status;
 
@@ -451,7 +458,7 @@ public boolean giveGlobusPublicPermissions(String datasetId)
         if (globusEndpoint.equals("") || globusBasicToken.equals("")) {
             return false;
         }
-        AccessToken clientTokenUser = getClientToken();
+        AccessToken clientTokenUser = getClientToken(settingsSvc.getValueForKey(SettingsServiceBean.Key.GlobusBasicToken, ""));
         if (clientTokenUser == null) {
             logger.severe("Cannot get client token ");
             return false;
@@ -908,7 +915,7 @@ private GlobusTask globusStatusCheck(String taskId, Logger globusLogger) throws
             try {
                 globusLogger.info("checking globus transfer task   " + taskId);
                 Thread.sleep(pollingInterval * 1000);
-                AccessToken clientTokenUser = getClientToken();
+                AccessToken clientTokenUser = getClientToken(settingsSvc.getValueForKey(SettingsServiceBean.Key.GlobusBasicToken, ""));
                 // success = globusServiceBean.getSuccessfulTransfers(clientTokenUser, taskId);
                 task = getTask(clientTokenUser, taskId, globusLogger);
                 if (task != null) {
From f14b75454a524fd8816d6f5367b0e15fbd0ded92 Mon Sep 17 00:00:00 2001
From: qqmyers
Date: Tue, 9 May 2023 14:53:56 -0400
Subject: [PATCH 0132/1525] enable globus store main() to run - will revert

---
 pom.xml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/pom.xml b/pom.xml
index e5b191f0ed7..4926f59f8a0 100644
--- a/pom.xml
+++ b/pom.xml
@@ -184,7 +184,7 @@
         <dependency>
             <groupId>org.glassfish</groupId>
             <artifactId>jakarta.json</artifactId>
-            <scope>provided</scope>
+
         </dependency>

From 51cfa409ddaa7d9d111bff71d6e69beff0a3454c Mon Sep 17 00:00:00 2001
From: Leonid Andreev
Date: Tue, 9 May 2023 15:53:31 -0400
Subject: [PATCH 0133/1525] Changes to the edit files page to accommodate
 "upload on create" #9361

---
 .../edu/harvard/iq/dataverse/EditDatafilesPage.java | 11 ++++++++++-
 .../command/impl/CreateNewDataFilesCommand.java     |  3 ++-
 2 files changed, 12 insertions(+), 2 deletions(-)

diff --git a/src/main/java/edu/harvard/iq/dataverse/EditDatafilesPage.java b/src/main/java/edu/harvard/iq/dataverse/EditDatafilesPage.java
index c39e6f62ce2..b892acdb527 100644
--- a/src/main/java/edu/harvard/iq/dataverse/EditDatafilesPage.java
+++ b/src/main/java/edu/harvard/iq/dataverse/EditDatafilesPage.java
@@ -2058,7 +2058,16 @@ public void handleFileUpload(FileUploadEvent event) throws IOException {
             // zip file.
             ///CreateDataFileResult createDataFilesResult = FileUtil.createDataFiles(workingVersion, uFile.getInputStream(), uFile.getFileName(), uFile.getContentType(), null, null, systemConfig);
-            Command<CreateDataFileResult> cmd = new CreateNewDataFilesCommand(dvRequestService.getDataverseRequest(), workingVersion, uFile.getInputStream(), uFile.getFileName(), uFile.getContentType(), null, null);
+            Command<CreateDataFileResult> cmd;
+            if (mode == FileEditMode.CREATE) {
+                // This is a file upload in the context of creating a brand new
+                // dataset that does not yet exist in the database. We must
+                // use the version of the Create New Files constructor that takes
+                // the parent Dataverse as the extra argument:
+                cmd = new CreateNewDataFilesCommand(dvRequestService.getDataverseRequest(), workingVersion, uFile.getInputStream(), uFile.getFileName(), uFile.getContentType(), null, null, null, workingVersion.getDataset().getOwner());
+            } else {
+                cmd = new CreateNewDataFilesCommand(dvRequestService.getDataverseRequest(), workingVersion, uFile.getInputStream(), uFile.getFileName(), uFile.getContentType(), null, null);
+            }
 
             CreateDataFileResult createDataFilesResult = commandEngine.submit(cmd);
 
diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateNewDataFilesCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateNewDataFilesCommand.java
index fb4f7a7db86..e2d7f834c4a 100644
--- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateNewDataFilesCommand.java
+++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateNewDataFilesCommand.java
@@ -64,7 +64,8 @@
  */
 // Note the commented out @RequiredPermissions. We need to use dynamic
 // permissions instead, to accommodate both adding files to an existing
-// dataset and files being uploaded on create of a new dataset.
+// dataset and files being uploaded in the context of creating a new dataset
+// via the Add Dataset page.
 //@RequiredPermissions( Permission.EditDataset )
 public class CreateNewDataFilesCommand extends AbstractCommand<CreateDataFileResult> {
     private static final Logger logger = Logger.getLogger(CreateNewDataFilesCommand.class.getCanonicalName());

From 8560ba4e16f763d48e6ec20f1ddaecd9b5728603 Mon Sep 17 00:00:00 2001
From: Leonid Andreev
Date: Tue, 9 May 2023 17:16:56 -0400
Subject: [PATCH 0134/1525] Makes the create file command refuse a zip file if
 uncompressed content exceeds the remaining quota; as opposed to accepting it
 zipped, even if the compressed size is under the quota. #9361

---
 .../command/impl/CreateNewDataFilesCommand.java | 17 +++++++++++------
 src/main/java/propertyFiles/Bundle.properties   |  1 +
 2 files changed, 12 insertions(+), 6 deletions(-)

diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateNewDataFilesCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateNewDataFilesCommand.java
index e2d7f834c4a..2608069dcb2 100644
--- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateNewDataFilesCommand.java
+++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateNewDataFilesCommand.java
@@ -348,7 +348,11 @@ public CreateDataFileResult execute(CommandContext ctxt) throws CommandException
                             if (storageQuotaLimit != null) {
                                 combinedUnzippedFileSize = combinedUnzippedFileSize + entry.getSize();
                                 if (combinedUnzippedFileSize > storageQuotaLimit) {
-                                    throw new FileExceedsStorageQuotaException(MessageFormat.format(BundleUtil.getStringFromBundle("file.addreplace.error.quota_exceeded"), bytesToHumanReadable(combinedUnzippedFileSize), bytesToHumanReadable(storageQuotaLimit)));
+                                    //throw new FileExceedsStorageQuotaException(MessageFormat.format(BundleUtil.getStringFromBundle("file.addreplace.error.quota_exceeded"), bytesToHumanReadable(combinedUnzippedFileSize), bytesToHumanReadable(storageQuotaLimit)));
+                                    // change of plans: if the unzipped content inside exceeds the remaining quota,
+                                    // we reject the upload outright, rather than accepting the zip
+                                    // file as is.
+                                    throw new CommandExecutionException(MessageFormat.format(BundleUtil.getStringFromBundle("file.addreplace.error.unzipped.quota_exceeded"), bytesToHumanReadable(storageQuotaLimit)), this);
                                 }
                             }
                         }
@@ -476,11 +480,12 @@ public CreateDataFileResult execute(CommandContext ctxt) throws CommandException
                     logger.warning("One of the unzipped files exceeds the size limit; resorting to saving the file as is. " + femsx.getMessage());
                     warningMessage = BundleUtil.getStringFromBundle("file.addreplace.warning.unzip.failed.size", Arrays.asList(FileSizeChecker.bytesToHumanReadable(fileSizeLimit)));
                     datafiles.clear();
-                } catch (FileExceedsStorageQuotaException fesqx) {
-                    logger.warning("One of the unzipped files exceeds the storage quota limit; resorting to saving the file as is. " + fesqx.getMessage());
-                    warningMessage = BundleUtil.getStringFromBundle("file.addreplace.warning.unzip.failed.quota", Arrays.asList(FileSizeChecker.bytesToHumanReadable(storageQuotaLimit)));
-                    datafiles.clear();
-                } finally {
+                } /*catch (FileExceedsStorageQuotaException fesqx) {
+                    //logger.warning("One of the unzipped files exceeds the storage quota limit; resorting to saving the file as is. " + fesqx.getMessage());
+                    //warningMessage = BundleUtil.getStringFromBundle("file.addreplace.warning.unzip.failed.quota", Arrays.asList(FileSizeChecker.bytesToHumanReadable(storageQuotaLimit)));
+                    //datafiles.clear();
+                    throw new CommandExecutionException(fesqx.getMessage(), fesqx, this);
+                }*/ finally {
                     if (zipFile != null) {
                         try {
                             zipFile.close();
diff --git a/src/main/java/propertyFiles/Bundle.properties b/src/main/java/propertyFiles/Bundle.properties
index 55679a01a07..b12e0df7c08 100644
--- a/src/main/java/propertyFiles/Bundle.properties
+++ b/src/main/java/propertyFiles/Bundle.properties
@@ -2155,6 +2155,7 @@ file.addreplace.file_size_ok=File size is in range.
 file.addreplace.error.byte_abrev=B
 file.addreplace.error.file_exceeds_limit=This file size ({0}) exceeds the size limit of {1}.
 file.addreplace.error.quota_exceeded=This file (size {0}) exceeds the remaining storage quota of {1}.
+file.addreplace.error.unzipped.quota_exceeded=Unzipped files exceed the remaining storage quota of {0}.
 file.addreplace.error.dataset_is_null=The dataset cannot be null.
 file.addreplace.error.dataset_id_is_null=The dataset ID cannot be null.
 file.addreplace.error.parsing=Error in parsing provided json
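The quota check above sums the declared uncompressed sizes of the zip entries and aborts the command as soon as the running total passes the remaining quota. A standalone sketch of that policy (a hypothetical helper, not the Dataverse implementation) follows:

```java
// Sketch: reject a zip upload outright once the declared uncompressed sizes
// exceed the remaining quota. Note that real code must not trust
// entry.getSize() blindly; it can be -1 or forged, as in zip bombs.
import java.io.IOException;
import java.util.Enumeration;
import java.util.zip.ZipEntry;
import java.util.zip.ZipFile;

public class ZipQuotaCheckSketch {
    static void checkAgainstQuota(ZipFile zipFile, long remainingQuotaBytes) throws IOException {
        long combinedUnzippedFileSize = 0L;
        Enumeration<? extends ZipEntry> entries = zipFile.entries();
        while (entries.hasMoreElements()) {
            ZipEntry entry = entries.nextElement();
            if (entry.isDirectory()) {
                continue;
            }
            combinedUnzippedFileSize += entry.getSize();
            if (combinedUnzippedFileSize > remainingQuotaBytes) {
                // Fail fast instead of falling back to storing the zip as is.
                throw new IOException("Unzipped files exceed the remaining storage quota of "
                        + remainingQuotaBytes + " bytes");
            }
        }
    }

    public static void main(String[] args) throws IOException {
        try (ZipFile zf = new ZipFile(args[0])) {          // path supplied by the caller
            checkAgainstQuota(zf, 10L * 1024 * 1024);      // assumption: 10 MB left
            System.out.println("Upload would be accepted.");
        }
    }
}
```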
From 51114583888e151d9830c51c8b1109a5d72bf663 Mon Sep 17 00:00:00 2001
From: Oliver Bertuch
Date: Wed, 10 May 2023 14:56:56 +0200
Subject: [PATCH 0135/1525] docs(ct): start extending dev usage docs

---
 .../source/container/dev-usage.rst | 99 +++++++++++++++++--
 1 file changed, 92 insertions(+), 7 deletions(-)

diff --git a/doc/sphinx-guides/source/container/dev-usage.rst b/doc/sphinx-guides/source/container/dev-usage.rst
index 4eba70eb23b..28ab7196926 100644
--- a/doc/sphinx-guides/source/container/dev-usage.rst
+++ b/doc/sphinx-guides/source/container/dev-usage.rst
@@ -17,21 +17,91 @@ To test drive these local changes to the Dataverse codebase in a containerized
 a setup described in :doc:`../developers/dev-environment`), you must a) build the application container and b) run it
 in addition to the necessary dependencies.
 
-Building and Running
---------------------
+Building
+--------
 
 To build the application image, run the following command, as described in :doc:`app-image`:
 
 ``mvn -Pct clean package``
 
-Now, start all the containers with a single command:
+Once this is done, you will see an image ``gdcc/dataverse:unstable`` available in your Docker cache.
+
+**Note:** This will skip any unit tests. If you have built the code before for testing, etc. you might omit the ``clean`` to
+avoid recompiling.
+
+**Note:** Even though we have a ``docker-compose-dev.yml`` file, it's currently not possible to build the images without
+invoking Maven. This might change in the future.
+
+Running
+-------
+
+After building the app image containing your local changes to the Dataverse application, you want to run it together
+with all dependencies. There are four ways to do this (commands executed at root of project directory):
+
+.. list-table:: Cheatsheet: Running Containers
+   :widths: 15 40 45
+   :header-rows: 1
+   :stub-columns: 1
+   :align: left
+
+   * - \
+     - Using Maven
+     - Using Compose
+   * - In foreground
+     - ``mvn -Pct docker:run``
+     - ``docker compose -f docker-compose-dev.yml up``
+   * - In background
+     - ``mvn -Pct docker:start``
+     - ``docker compose -f docker-compose-dev.yml up -d``
+
+Both ways have their pros and cons:
+
+.. list-table:: Decision Helper: Fore- or Background?
+   :widths: 15 40 45
+   :header-rows: 1
+   :stub-columns: 1
+   :align: left
+
+   * - \
+     - Pros
+     - Cons
+   * - Foreground
+     - | Logs scroll by when interacting with API / UI
+       | To stop all containers simply hit ``Ctrl+C``
+     - | Lots and lots of logs scrolling by
+       | Must stop all containers to restart
+   * - Background
+     - | No logs scrolling by
+       | Easy to replace single containers
+     - | No logs scrolling by
+       | Stopping containers needs an extra command
+
+In case you want to concatenate building and running, here's a cheatsheet for you:
+
+.. list-table:: Cheatsheet: Building and Running Containers
+   :widths: 15 40 45
+   :header-rows: 1
+   :stub-columns: 1
+   :align: left
+
+   * - \
+     - Using Maven
+     - Using Compose
+   * - In foreground
+     - ``mvn -Pct package docker:run``
+     - ``mvn -Pct package && docker compose -f docker-compose-dev.yml up``
+   * - In background
+     - ``mvn -Pct package docker:start``
+     - ``mvn -Pct package && docker compose -f docker-compose-dev.yml up -d``
 
+Once all containers have been started, you can check if the application was deployed correctly by checking the version
+at http://localhost:8080/api/info/version or watch the logs.
 
-``mvn -Pct docker:run``
+**Note:** To stop all containers you started in background, invoke ``mvn -Pct docker:stop`` or
+``docker compose -f docker-compose-dev.yml down``.
 
-(You could also concatenate both commands into one.)
 
-Once all containers have been started, you can check if the application was deployed correctly by checking the version
-at http://localhost:8080/api/info/version.
+Bootstrapping New Instance
+--------------------------
 
 If all looks good, run the :download:`docker-final-setup.sh <../../../../scripts/dev/docker-final-setup.sh>` script below.
 (This is a simplified version of the script described in :ref:`rebuilding-dev-environment`.)
@@ -49,3 +119,18 @@ You can also access the Payara Admin Console if needed, which is available at h
 
 Note that data is persisted in ``./docker-dev-volumes`` in the root of the Git repo. For a clean start, you should
 remove this directory before running the ``mvn`` commands above.
+
+Viewing Logs
+------------
+
+TODO
+
+Re-Deploying
+------------
+
+TODO
+
+Using A Debugger
+----------------
+
+TODO

From 78d035e348d029eb28f0e96e62036718995a8e75 Mon Sep 17 00:00:00 2001
From: Oliver Bertuch
Date: Wed, 10 May 2023 15:21:14 +0200
Subject: [PATCH 0136/1525] docs(ct): add quickstart for dev usage

---
 doc/sphinx-guides/source/container/dev-usage.rst | 13 +++++++++++++
 1 file changed, 13 insertions(+)

diff --git a/doc/sphinx-guides/source/container/dev-usage.rst b/doc/sphinx-guides/source/container/dev-usage.rst
index 28ab7196926..843ebef50b6 100644
--- a/doc/sphinx-guides/source/container/dev-usage.rst
+++ b/doc/sphinx-guides/source/container/dev-usage.rst
@@ -17,6 +17,12 @@ To test drive these local changes to the Dataverse codebase in a containerized
 a setup described in :doc:`../developers/dev-environment`), you must a) build the application container and b) run it
 in addition to the necessary dependencies.
 
+| **TL;DR** *I have all that, just give me containers!*
+| Execute ``mvn -Pct clean package docker:run``, wait and continue at :ref:`dev-bootstrap`.
+
+
+.. _dev-build:
+
 Building
 --------
 
@@ -32,6 +38,9 @@ avoid recompiling.
 **Note:** Even though we have a ``docker-compose-dev.yml`` file, it's currently not possible to build the images without
 invoking Maven. This might change in the future.
 
+
+.. _dev-run:
+
 Running
 -------
 
@@ -100,6 +109,10 @@ at http://localhost:8080/api/info/version or watch the logs.
 **Note:** To stop all containers you started in background, invoke ``mvn -Pct docker:stop`` or
 ``docker compose -f docker-compose-dev.yml down``.
 
+
+.. _dev-bootstrap:
+
 Bootstrapping New Instance
 --------------------------

From bfd2e953514ed9efcf5f48931d929b6678e3bb89 Mon Sep 17 00:00:00 2001
From: Oliver Bertuch
Date: Wed, 10 May 2023 15:42:40 +0200
Subject: [PATCH 0137/1525] docs(ct): add log viewing hints for dev usage

---
 doc/sphinx-guides/source/container/dev-usage.rst | 17 ++++++++++++++++-
 1 file changed, 16 insertions(+), 1 deletion(-)

diff --git a/doc/sphinx-guides/source/container/dev-usage.rst b/doc/sphinx-guides/source/container/dev-usage.rst
index 843ebef50b6..bf0629b8d75 100644
--- a/doc/sphinx-guides/source/container/dev-usage.rst
+++ b/doc/sphinx-guides/source/container/dev-usage.rst
@@ -133,10 +133,25 @@ You can also access the Payara Admin Console if needed, which is available at h
 Note that data is persisted in ``./docker-dev-volumes`` in the root of the Git repo. For a clean start, you should
 remove this directory before running the ``mvn`` commands above.
 
+
+.. _dev-logs:
+
 Viewing Logs
 ------------
 
-TODO
+In case you started containers in background mode (see :ref:`dev-run`), you can use the following commands to view and/or
+watch logs from the containers.
+
+The safe bet for any running container's logs is to lookup the container name via ``docker ps`` and use it in
+``docker logs <name>``. You can tail logs by adding ``-n`` and follow them by adding ``-f`` (just like ``tail`` cmd).
+See ``docker logs --help`` for more.
+
+Alternatives:
+
+- In case you used Maven for running, you may use ``mvn -Pct docker:logs -Ddocker.filter=<service name>``.
+- If you used Docker Compose for running, you may use ``docker compose -f docker-compose-dev.yml logs <service name>``.
+  Options are the same.
 
 Re-Deploying
 ------------

From 149d50030926c41ee0af5a59eefdcd1e9c69984a Mon Sep 17 00:00:00 2001
From: Oliver Bertuch
Date: Wed, 10 May 2023 16:12:36 +0200
Subject: [PATCH 0138/1525] docs,feat(ct): enable and document JDWP usage for
 dev usage

---
 doc/sphinx-guides/source/container/dev-usage.rst | 9 ++++++++-
 docker-compose-dev.yml                           | 1 +
 2 files changed, 9 insertions(+), 1 deletion(-)

diff --git a/doc/sphinx-guides/source/container/dev-usage.rst b/doc/sphinx-guides/source/container/dev-usage.rst
index bf0629b8d75..2f918ecd551 100644
--- a/doc/sphinx-guides/source/container/dev-usage.rst
+++ b/doc/sphinx-guides/source/container/dev-usage.rst
@@ -161,4 +161,11 @@
 Using A Debugger
 ----------------
 
-TODO
+The :doc:`base-image` enables usage of the `Java Debugging Wire Protocol`_
+for remote debugging if you set ``ENABLE_JDWP=1`` as environment variable for the application container.
+The default configuration when executing containers with the commands listed at :ref:`dev-run` already enables this.
+
+There are a lot of tutorials on how to connect your IDE's debugger to a remote endpoint. Please use ``localhost:9009``
+as the endpoint. Here are links to the most common IDEs docs on remote debugging:
+`Eclipse`_,
+`IntelliJ`_
diff --git a/docker-compose-dev.yml b/docker-compose-dev.yml
index 2b079cf6029..2fe9ba71926 100644
--- a/docker-compose-dev.yml
+++ b/docker-compose-dev.yml
@@ -12,6 +12,7 @@ services:
       - DATAVERSE_DB_HOST=postgres
       - DATAVERSE_DB_PASSWORD=secret
       - DATAVERSE_DB_USER=${DATAVERSE_DB_USER}
+      - ENABLE_JDWP=1
     ports:
       - "8080:8080" # HTTP (Dataverse Application)
       - "4848:4848" # HTTP (Payara Admin Console)

From 1386f1e77d864c18b86deeb9bcac678e51e289a4 Mon Sep 17 00:00:00 2001
From: Oliver Bertuch
Date: Wed, 10 May 2023 17:08:26 +0200
Subject: [PATCH 0139/1525] docs(ct): add notes about redeploying in dev usage

---
 doc/sphinx-guides/source/container/dev-usage.rst | 18 +++++++++++++++++-
 1 file changed, 17 insertions(+), 1 deletion(-)

diff --git a/doc/sphinx-guides/source/container/dev-usage.rst b/doc/sphinx-guides/source/container/dev-usage.rst
index 2f918ecd551..3eebb10cee5 100644
--- a/doc/sphinx-guides/source/container/dev-usage.rst
+++ b/doc/sphinx-guides/source/container/dev-usage.rst
@@ -156,7 +156,23 @@
 Re-Deploying
 ------------
 
-TODO
+Currently, the only safe and tested way to re-deploy the Dataverse application after you applied code changes is
+by recreating the container(s). In the future, more options may be added here.
+
+If you started your containers in foreground, just stop them and follow the steps for building and running again.
+The same goes for using Maven to start the containers in the background.
+
+In case of using Docker Compose and starting the containers in the background, you can use a workaround to only
+restart the application container:
+
+.. code-block::
+
+  # First rebuild the container (will complain about an image still in use, this is fine.)
+  mvn -Pct package
+  # Then re-create the container (will automatically restart the container for you)
+  docker compose -f docker-compose-dev.yml create dev_dataverse
+
+Using ``docker container inspect dev_dataverse | grep Image`` you can verify the changed checksums.
 
 Using A Debugger
 ----------------

From 64743bf7ae09d0911ca09d1c7eff99a7cf147331 Mon Sep 17 00:00:00 2001
From: Eryk Kulikowski
Date: Thu, 11 May 2023 11:17:34 +0200
Subject: [PATCH 0140/1525] dataset page performance improvements

---
 .../edu/harvard/iq/dataverse/DatasetPage.java | 94 +++++++++----------
 .../edu/harvard/iq/dataverse/DvObject.java    |  4 +
 .../iq/dataverse/DvObjectServiceBean.java     | 31 ++++++
 3 files changed, 81 insertions(+), 48 deletions(-)

diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java b/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java
index 9294620d790..aae7d11b90f 100644
--- a/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java
+++ b/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java
@@ -40,6 +40,7 @@
 import edu.harvard.iq.dataverse.ingest.IngestServiceBean;
 import edu.harvard.iq.dataverse.license.LicenseServiceBean;
 import edu.harvard.iq.dataverse.metadataimport.ForeignMetadataImportServiceBean;
+import edu.harvard.iq.dataverse.pidproviders.PidUtil;
 import edu.harvard.iq.dataverse.privateurl.PrivateUrl;
 import edu.harvard.iq.dataverse.privateurl.PrivateUrlServiceBean;
 import edu.harvard.iq.dataverse.privateurl.PrivateUrlUtil;
@@ -81,6 +82,8 @@
 import java.util.Set;
 import java.util.Collection;
 import java.util.logging.Logger;
+import java.util.stream.Collectors;
+
 import javax.ejb.EJB;
 import javax.ejb.EJBException;
 import javax.faces.application.FacesMessage;
@@ -233,6 +236,8 @@ public enum DisplayMode {
     ExternalToolServiceBean externalToolService;
     @EJB
     SolrClientService solrClientService;
+    @EJB
+    DvObjectServiceBean dvObjectService;
     @Inject
     DataverseRequestServiceBean dvRequestService;
     @Inject
@@ -678,48 +683,43 @@ public void showAll(){
     }
 
     private List<FileMetadata> selectFileMetadatasForDisplay() {
-        Set<Long> searchResultsIdSet = null;
-
-        if (isIndexedVersion()) {
+        final Set<Long> searchResultsIdSet;
+        if (StringUtil.isEmpty(fileLabelSearchTerm) && StringUtil.isEmpty(fileTypeFacet) && StringUtil.isEmpty(fileAccessFacet) && StringUtil.isEmpty(fileTagsFacet)) {
+            // But, if no search terms were specified, we return the full
+            // list of the files in the version:
+            // Since the search results should include the full set of fmds if all the
+            // terms/facets are empty, setting them to null should just be
+            // an optimization for the loop below
+            searchResultsIdSet = null;
+        } else if (isIndexedVersion()) {
             // We run the search even if no search term and/or facets are
             // specified - to generate the facet labels list:
             searchResultsIdSet = getFileIdsInVersionFromSolr(workingVersion.getId(), this.fileLabelSearchTerm);
-            // But, if no search terms were specified, we return the full
-            // list of the files in the version:
-            if (StringUtil.isEmpty(fileLabelSearchTerm)
-                    && StringUtil.isEmpty(fileTypeFacet)
-                    && StringUtil.isEmpty(fileAccessFacet)
-                    && StringUtil.isEmpty(fileTagsFacet)) {
-                // Since the search results should include the full set of fmds if all the
-                // terms/facets are empty, setting them to null should just be
-                // an optimization for the loop below
-                searchResultsIdSet = null;
-            }
-        } else {
+        } else if (!StringUtil.isEmpty(this.fileLabelSearchTerm)) {
             // No, this is not an indexed version.
             // If the search term was specified, we'll run a search in the db;
             // if not - return the full list of files in the version.
             // (no facets without solr!)
-            if (!StringUtil.isEmpty(this.fileLabelSearchTerm)) {
-                searchResultsIdSet = getFileIdsInVersionFromDb(workingVersion.getId(), this.fileLabelSearchTerm);
-            }
+            searchResultsIdSet = getFileIdsInVersionFromDb(workingVersion.getId(), this.fileLabelSearchTerm);
+        } else {
+            searchResultsIdSet = null;
         }
 
-        List<FileMetadata> retList = new ArrayList<>();
-
-        for (FileMetadata fileMetadata : workingVersion.getFileMetadatas()) {
-            if (searchResultsIdSet == null || searchResultsIdSet.contains(fileMetadata.getDataFile().getId())) {
-                retList.add(fileMetadata);
-            }
+        final List<FileMetadata> md = workingVersion.getFileMetadatas();
+        final List<FileMetadata> retList;
+        if (searchResultsIdSet == null) {
+            retList = new ArrayList<>(md);
+        } else {
+            retList = md.stream().filter(x -> searchResultsIdSet.contains(x.getDataFile().getId())).collect(Collectors.toList());
         }
         sortFileMetadatas(retList);
         return retList;
     }
 
-    private void sortFileMetadatas(List<FileMetadata> fileList) {
+    private void sortFileMetadatas(final List<FileMetadata> fileList) {
 
-        DataFileComparator dfc = new DataFileComparator();
-        Comparator<FileMetadata> comp = dfc.compareBy(folderPresort, tagPresort, fileSortField, !"desc".equals(fileSortOrder));
+        final DataFileComparator dfc = new DataFileComparator();
+        final Comparator<FileMetadata> comp = dfc.compareBy(folderPresort, tagPresort, fileSortField, !"desc".equals(fileSortOrder));
         Collections.sort(fileList, comp);
     }
 
@@ -1843,6 +1843,17 @@ public boolean webloaderUploadSupported() {
         return settingsWrapper.isWebloaderUpload() && StorageIO.isDirectUploadEnabled(dataset.getEffectiveStorageDriverId());
     }
 
+    private void setIdByPersistentId() {
+        GlobalId gid = PidUtil.parseAsGlobalID(persistentId);
+        Long id = dvObjectService.findIdByGlobalId(gid, DvObject.DType.Dataset);
+        if (id == null) {
+            id = dvObjectService.findIdByAltGlobalId(gid, DvObject.DType.Dataset);
+        }
+        if (id != null) {
+            this.setId(id);
+        }
+    }
+
     private String init(boolean initFull) {
         //System.out.println("_YE_OLDE_QUERY_COUNTER_");  // for debug purposes
 
@@ -1866,21 +1877,9 @@ private String init(boolean initFull) {
         // Set the workingVersion and Dataset
         // ---------------------------------------
         if (persistentId != null) {
-            logger.fine("initializing DatasetPage with persistent ID " + persistentId);
-            // Set Working Version and Dataset by PersistentID
-            dataset = datasetService.findByGlobalId(persistentId);
-            if (dataset == null) {
-                logger.warning("No such dataset: "+persistentId);
-                return permissionsWrapper.notFound();
-            }
-            logger.fine("retrieved dataset, id="+dataset.getId());
-
-            retrieveDatasetVersionResponse = datasetVersionService.selectRequestedVersion(dataset.getVersions(), version);
-            //retrieveDatasetVersionResponse = datasetVersionService.retrieveDatasetVersionByPersistentId(persistentId, version);
-            this.workingVersion = retrieveDatasetVersionResponse.getDatasetVersion();
-            logger.fine("retrieved version: id: " + workingVersion.getId() + ", state: " + this.workingVersion.getVersionState());
-
-        } else if (this.getId() != null) {
+            setIdByPersistentId();
+        }
+        if (this.getId() != null) {
             // Set Working Version and Dataset by Dataset Id and Version
             dataset = datasetService.find(this.getId());
             if (dataset == null) {
@@ -2835,15 +2834,14 @@ public String refresh() {
         DatasetVersionServiceBean.RetrieveDatasetVersionResponse retrieveDatasetVersionResponse = null;
 
         if (persistentId != null) {
-            //retrieveDatasetVersionResponse = datasetVersionService.retrieveDatasetVersionByPersistentId(persistentId, version);
-            dataset = datasetService.findByGlobalId(persistentId);
-            retrieveDatasetVersionResponse = datasetVersionService.selectRequestedVersion(dataset.getVersions(), version);
-        } else if (versionId != null) {
-            retrieveDatasetVersionResponse = datasetVersionService.retrieveDatasetVersionByVersionId(versionId);
-        } else if (dataset.getId() != null) {
+            setIdByPersistentId();
+        }
+        if (dataset.getId() != null) {
             //retrieveDatasetVersionResponse = datasetVersionService.retrieveDatasetVersionById(dataset.getId(), version);
             dataset = datasetService.find(dataset.getId());
             retrieveDatasetVersionResponse = datasetVersionService.selectRequestedVersion(dataset.getVersions(), version);
+        } else if (versionId != null) {
+            retrieveDatasetVersionResponse = datasetVersionService.retrieveDatasetVersionByVersionId(versionId);
         }
 
         if (retrieveDatasetVersionResponse == null) {
diff --git a/src/main/java/edu/harvard/iq/dataverse/DvObject.java b/src/main/java/edu/harvard/iq/dataverse/DvObject.java
index 854888737ee..e3013b8cf51 100644
--- a/src/main/java/edu/harvard/iq/dataverse/DvObject.java
+++ b/src/main/java/edu/harvard/iq/dataverse/DvObject.java
@@ -30,9 +30,13 @@
                 query="SELECT COUNT(obj) FROM DvObject obj WHERE obj.owner.id=:id"),
     @NamedQuery(name = "DvObject.findByGlobalId",
             query = "SELECT o FROM DvObject o WHERE o.identifier=:identifier and o.authority=:authority and o.protocol=:protocol and o.dtype=:dtype"),
+    @NamedQuery(name = "DvObject.findIdByGlobalId",
+            query = "SELECT o.id FROM DvObject o WHERE o.identifier=:identifier and o.authority=:authority and o.protocol=:protocol and o.dtype=:dtype"),
     @NamedQuery(name = "DvObject.findByAlternativeGlobalId",
             query = "SELECT o FROM DvObject o, AlternativePersistentIdentifier a WHERE o.id = a.dvObject.id and a.identifier=:identifier and a.authority=:authority and a.protocol=:protocol and o.dtype=:dtype"),
+    @NamedQuery(name = "DvObject.findIdByAlternativeGlobalId",
+            query = "SELECT o.id FROM DvObject o, AlternativePersistentIdentifier a WHERE o.id = a.dvObject.id and a.identifier=:identifier and a.authority=:authority and a.protocol=:protocol and o.dtype=:dtype"),
     @NamedQuery(name = "DvObject.findByProtocolIdentifierAuthority",
             query = "SELECT o FROM DvObject o WHERE o.identifier=:identifier and o.authority=:authority and o.protocol=:protocol"),
 
diff --git a/src/main/java/edu/harvard/iq/dataverse/DvObjectServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DvObjectServiceBean.java
index e22e2f188fd..3430528aea3 100644
--- a/src/main/java/edu/harvard/iq/dataverse/DvObjectServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/DvObjectServiceBean.java
@@ -116,6 +116,16 @@ public DvObject findByAltGlobalId(GlobalId globalId, DvObject.DType dtype) {
         return runFindByGlobalId(query, globalId, dtype);
     }
 
+    public Long findIdByGlobalId(GlobalId globalId, DvObject.DType dtype) {
+        Query query = em.createNamedQuery("DvObject.findIdByGlobalId");
+        return runFindIdByGlobalId(query, globalId, dtype);
+    }
+
+    public Long findIdByAltGlobalId(GlobalId globalId, DvObject.DType dtype) {
+        Query query = em.createNamedQuery("DvObject.findIdByAlternativeGlobalId");
+        return runFindIdByGlobalId(query, globalId, dtype);
+    }
+
     private DvObject runFindByGlobalId(Query query, GlobalId gid, DvObject.DType dtype) {
         DvObject foundDvObject = null;
         try {
@@ -136,6 +146,27 @@ private DvObject runFindByGlobalId(Query query, GlobalId gid, DvObject.DType dty
         }
         return foundDvObject;
     }
+
+    private Long runFindIdByGlobalId(Query query, GlobalId gid, DvObject.DType dtype) {
+        Long foundDvObject = null;
+        try {
+            query.setParameter("identifier", gid.getIdentifier());
+            query.setParameter("protocol", gid.getProtocol());
+            query.setParameter("authority", gid.getAuthority());
+            query.setParameter("dtype", dtype.getDType());
+            foundDvObject = (Long) query.getSingleResult();
+        } catch (javax.persistence.NoResultException e) {
+            // (set to .info, this can fill the log file with thousands of
+            // these messages during a large harvest run)
+            logger.fine("no dvObject found: " + gid.asString());
+            // DO nothing, just return null.
+            return null;
+        } catch (Exception ex) {
+            logger.info("Exception caught in findByGlobalId: " + ex.getLocalizedMessage());
+            return null;
+        }
+        return foundDvObject;
+    }
 
     public DvObject findByGlobalId(GlobalId globalId) {
         return (DvObject) em.createNamedQuery("DvObject.findByProtocolIdentifierAuthority")
datafileService.getUserStorageQuota((AuthenticatedUser) session.getUser(), dataset).getRemainingQuotaInBytes() == 0; + return systemConfig.isStorageQuotasEnforced() && userStorageQuota != null && userStorageQuota.getRemainingQuotaInBytes() == 0; } public String init() { @@ -589,7 +593,8 @@ public String init() { clone = workingVersion.cloneDatasetVersion(); this.maxFileUploadSizeInBytes = systemConfig.getMaxFileUploadSizeForStore(dataset.getEffectiveStorageDriverId()); if (systemConfig.isStorageQuotasEnforced()) { - this.maxTotalUploadSizeInBytes = datafileService.getUserStorageQuota((AuthenticatedUser) session.getUser(), dataset).getRemainingQuotaInBytes(); + this.userStorageQuota = datafileService.getUserStorageQuota((AuthenticatedUser) session.getUser(), dataset); + this.maxTotalUploadSizeInBytes = userStorageQuota.getRemainingQuotaInBytes(); } this.maxIngestSizeInBytes = systemConfig.getTabularIngestSizeLimit(); this.humanPerFormatTabularLimits = populateHumanPerFormatTabularLimits(); @@ -687,7 +692,7 @@ public String init() { if (isHasPublicStore()){ JH.addMessage(FacesMessage.SEVERITY_WARN, getBundleString("dataset.message.label.fileAccess"), getBundleString("dataset.message.publicInstall")); } - + return null; } @@ -1525,7 +1530,7 @@ public void handleDropBoxUpload(ActionEvent event) { // zip file. //datafiles = ingestService.createDataFiles(workingVersion, dropBoxStream, fileName, "application/octet-stream"); //CreateDataFileResult createDataFilesResult = FileUtil.createDataFiles(workingVersion, dropBoxStream, fileName, "application/octet-stream", null, null, systemConfig); - Command cmd = new CreateNewDataFilesCommand(dvRequestService.getDataverseRequest(), workingVersion, dropBoxStream, fileName, "application/octet-stream", null, null); + Command cmd = new CreateNewDataFilesCommand(dvRequestService.getDataverseRequest(), workingVersion, dropBoxStream, fileName, "application/octet-stream", null, userStorageQuota, null); CreateDataFileResult createDataFilesResult = commandEngine.submit(cmd); datafiles = createDataFilesResult.getDataFiles(); Optional.ofNullable(editDataFilesPageHelper.getHtmlErrorMessage(createDataFilesResult)).ifPresent(errorMessage -> errorMessages.add(errorMessage)); @@ -2064,9 +2069,9 @@ public void handleFileUpload(FileUploadEvent event) throws IOException { // dataset that does not yet exist in the database. 
We must // use the version of the Create New Files constructor that takes // the parent Dataverse as the extra argument: - cmd = new CreateNewDataFilesCommand(dvRequestService.getDataverseRequest(), workingVersion, uFile.getInputStream(), uFile.getFileName(), uFile.getContentType(), null, null, null, workingVersion.getDataset().getOwner()); + cmd = new CreateNewDataFilesCommand(dvRequestService.getDataverseRequest(), workingVersion, uFile.getInputStream(), uFile.getFileName(), uFile.getContentType(), null, userStorageQuota, null, null, workingVersion.getDataset().getOwner()); } else { - cmd = new CreateNewDataFilesCommand(dvRequestService.getDataverseRequest(), workingVersion, uFile.getInputStream(), uFile.getFileName(), uFile.getContentType(), null, null); + cmd = new CreateNewDataFilesCommand(dvRequestService.getDataverseRequest(), workingVersion, uFile.getInputStream(), uFile.getFileName(), uFile.getContentType(), null, userStorageQuota, null); } CreateDataFileResult createDataFilesResult = commandEngine.submit(cmd); @@ -2193,7 +2198,7 @@ public void handleExternalUpload() { //datafiles = ingestService.createDataFiles(workingVersion, dropBoxStream, fileName, "application/octet-stream"); ///CreateDataFileResult createDataFilesResult = FileUtil.createDataFiles(workingVersion, null, fileName, contentType, fullStorageIdentifier, checksumValue, checksumType, systemConfig); - Command cmd = new CreateNewDataFilesCommand(dvRequestService.getDataverseRequest(), workingVersion, null, fileName, contentType, fullStorageIdentifier, checksumValue, checksumType); + Command cmd = new CreateNewDataFilesCommand(dvRequestService.getDataverseRequest(), workingVersion, null, fileName, contentType, fullStorageIdentifier, userStorageQuota, checksumValue, checksumType); CreateDataFileResult createDataFilesResult = commandEngine.submit(cmd); datafiles = createDataFilesResult.getDataFiles(); Optional.ofNullable(editDataFilesPageHelper.getHtmlErrorMessage(createDataFilesResult)).ifPresent(errorMessage -> errorMessages.add(errorMessage)); diff --git a/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/MediaResourceManagerImpl.java b/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/MediaResourceManagerImpl.java index c71eeb3d375..0f11b858238 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/MediaResourceManagerImpl.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/MediaResourceManagerImpl.java @@ -10,9 +10,8 @@ import edu.harvard.iq.dataverse.EjbDataverseEngine; import edu.harvard.iq.dataverse.PermissionServiceBean; import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser; -import edu.harvard.iq.dataverse.dataaccess.StorageIO; -import edu.harvard.iq.dataverse.datacapturemodule.DataCaptureModuleUtil; import edu.harvard.iq.dataverse.datasetutility.FileExceedsMaxSizeException; +import edu.harvard.iq.dataverse.DataFileServiceBean.UserStorageQuota; import edu.harvard.iq.dataverse.engine.command.Command; import edu.harvard.iq.dataverse.engine.command.DataverseRequest; import edu.harvard.iq.dataverse.engine.command.exception.CommandException; @@ -22,7 +21,6 @@ import edu.harvard.iq.dataverse.settings.SettingsServiceBean; import edu.harvard.iq.dataverse.util.BundleUtil; import edu.harvard.iq.dataverse.util.ConstraintViolationUtil; -import edu.harvard.iq.dataverse.util.FileUtil; import edu.harvard.iq.dataverse.util.SystemConfig; import java.io.ByteArrayInputStream; import java.io.IOException; @@ -309,7 +307,11 @@ DepositReceipt replaceOrAddFiles(String uri, Deposit 
deposit, AuthCredentials au try { //CreateDataFileResult createDataFilesResponse = FileUtil.createDataFiles(editVersion, deposit.getInputStream(), uploadedZipFilename, guessContentTypeForMe, null, null, systemConfig); - Command cmd = new CreateNewDataFilesCommand(dvReq, editVersion, deposit.getInputStream(), uploadedZipFilename, guessContentTypeForMe, null, null); + UserStorageQuota quota = null; + if (systemConfig.isStorageQuotasEnforced()) { + quota = dataFileService.getUserStorageQuota(user, dataset); + } + Command cmd = new CreateNewDataFilesCommand(dvReq, editVersion, deposit.getInputStream(), uploadedZipFilename, guessContentTypeForMe, null, quota, null); CreateDataFileResult createDataFilesResult = commandEngine.submit(cmd); dataFiles = createDataFilesResult.getDataFiles(); } catch (CommandException ex) { diff --git a/src/main/java/edu/harvard/iq/dataverse/datasetutility/AddReplaceFileHelper.java b/src/main/java/edu/harvard/iq/dataverse/datasetutility/AddReplaceFileHelper.java index 3914c4dc4cc..38a31dba82f 100644 --- a/src/main/java/edu/harvard/iq/dataverse/datasetutility/AddReplaceFileHelper.java +++ b/src/main/java/edu/harvard/iq/dataverse/datasetutility/AddReplaceFileHelper.java @@ -1212,7 +1212,11 @@ private boolean step_030_createNewFilesViaIngest(){ this.newCheckSumType, this.systemConfig);*/ - Command cmd = new CreateNewDataFilesCommand(dvRequest, workingVersion, newFileInputStream, newFileName, newFileContentType, newStorageIdentifier, newCheckSum, newCheckSumType); + DataFileServiceBean.UserStorageQuota quota = null; + if (systemConfig.isStorageQuotasEnforced()) { + quota = fileService.getUserStorageQuota(dvRequest.getAuthenticatedUser(), dataset); + } + Command cmd = new CreateNewDataFilesCommand(dvRequest, workingVersion, newFileInputStream, newFileName, newFileContentType, newStorageIdentifier, quota, newCheckSum, newCheckSumType); CreateDataFileResult createDataFilesResult = commandEngine.submit(cmd); initialFileList = createDataFilesResult.getDataFiles(); diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateNewDataFilesCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateNewDataFilesCommand.java index 2608069dcb2..b7ceddcd8bf 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateNewDataFilesCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateNewDataFilesCommand.java @@ -78,12 +78,13 @@ public class CreateNewDataFilesCommand extends AbstractCommand datafiles = new ArrayList<>(); //When there is no checksum/checksumtype being sent (normal upload, needs to be calculated), set the type to the current default @@ -132,7 +136,7 @@ public CreateDataFileResult execute(CommandContext ctxt) throws CommandException if (ctxt.systemConfig().isStorageQuotasEnforced()) { //storageQuotaLimit = ctxt.files().getClass()...; - UserStorageQuota quota = ctxt.files().getUserStorageQuota(super.getRequest().getAuthenticatedUser(), this.version.getDataset()); + //UserStorageQuota quota = ctxt.files().getUserStorageQuota(super.getRequest().getAuthenticatedUser(), this.version.getDataset()); if (quota != null) { storageQuotaLimit = quota.getRemainingQuotaInBytes(); } @@ -247,6 +251,10 @@ public CreateDataFileResult execute(CommandContext ctxt) throws CommandException } datafiles.add(datafile); + // Update quota if present + if (quota != null) { + quota.setTotalUsageInBytes(quota.getTotalUsageInBytes() - datafile.getFilesize()); + } return CreateDataFileResult.success(fileName, finalType, 
datafiles); } @@ -259,6 +267,7 @@ public CreateDataFileResult execute(CommandContext ctxt) throws CommandException ZipEntry zipEntry = null; int fileNumberLimit = ctxt.systemConfig().getZipUploadFilesLimit(); + Long combinedUnzippedFileSize = 0L; try { Charset charset = null; @@ -307,8 +316,7 @@ public CreateDataFileResult execute(CommandContext ctxt) throws CommandException */ int numberOfUnpackableFiles = 0; - Long combinedUnzippedFileSize = 0L; - + /** * Note that we can't just use zipFile.size(), * unfortunately, since that's the total number of entries, @@ -363,6 +371,8 @@ public CreateDataFileResult execute(CommandContext ctxt) throws CommandException // Close the ZipFile, re-open as ZipInputStream: zipFile.close(); + // reset: + combinedUnzippedFileSize = 0L; if (charset != null) { unZippedIn = new ZipInputStream(new FileInputStream(tempFile.toFile()), charset); @@ -458,6 +468,7 @@ public CreateDataFileResult execute(CommandContext ctxt) throws CommandException } datafiles.add(datafile); + combinedUnzippedFileSize += datafile.getFilesize(); } } } @@ -505,6 +516,10 @@ public CreateDataFileResult execute(CommandContext ctxt) throws CommandException // do nothing - it's just a temp file. logger.warning("Could not remove temp file " + tempFile.getFileName().toString()); } + // update the quota object: + if (quota != null) { + quota.setTotalUsageInBytes(quota.getTotalUsageInBytes() + combinedUnzippedFileSize); + } // and return: return CreateDataFileResult.success(fileName, finalType, datafiles); } @@ -524,9 +539,9 @@ public CreateDataFileResult execute(CommandContext ctxt) throws CommandException logger.severe("Processing of zipped shapefile failed."); return CreateDataFileResult.error(fileName, finalType); } + Long storageQuotaLimitForRezippedFiles = storageQuotaLimit; try { - Long storageQuotaLimitForRezippedFiles = storageQuotaLimit; for (File finalFile : shpIngestHelper.getFinalRezippedFiles()) { FileInputStream finalFileInputStream = new FileInputStream(finalFile); @@ -598,6 +613,10 @@ public CreateDataFileResult execute(CommandContext ctxt) throws CommandException logger.warning("Unable to delete: " + tempFile.toString() + "due to Security Exception: " + se.getMessage()); } + // update the quota object: + if (quota != null) { + quota.setTotalUsageInBytes(storageQuotaLimitForRezippedFiles); + } return CreateDataFileResult.success(fileName, finalType, datafiles); } else { logger.severe("No files added from directory of rezipped shapefiles"); @@ -665,6 +684,10 @@ public CreateDataFileResult execute(CommandContext ctxt) throws CommandException } datafiles.add(datafile); + // Update quota (may not be necessary in the context of direct upload - ?) 
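// [illustrative note, not part of the patch] The adjustment below touches only
// the in-memory quota object; nothing is persisted yet. That is what lets the
// remaining allowance change dynamically between files of the same request,
// before the uploaded files are saved.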
+            if (quota != null) {
+                quota.setTotalUsageInBytes(quota.getTotalUsageInBytes() - datafile.getFilesize());
+            }
             return CreateDataFileResult.success(fileName, finalType, datafiles);
         }

From 857e66749e9db76f5f185b7a636821f14b3c6a01 Mon Sep 17 00:00:00 2001
From: Eryk Kulikowski
Date: Thu, 11 May 2023 18:45:17 +0200
Subject: [PATCH 0142/1525] faster, non-locking find query for datasets with
 many files

---
 .../edu/harvard/iq/dataverse/Dataset.java     |  2 ++
 .../edu/harvard/iq/dataverse/DatasetPage.java | 12 +++++-----
 .../iq/dataverse/DatasetServiceBean.java      | 23 +++++++++++++++++++
 3 files changed, 31 insertions(+), 6 deletions(-)

diff --git a/src/main/java/edu/harvard/iq/dataverse/Dataset.java b/src/main/java/edu/harvard/iq/dataverse/Dataset.java
index 683b6687c8b..da88a388806 100644
--- a/src/main/java/edu/harvard/iq/dataverse/Dataset.java
+++ b/src/main/java/edu/harvard/iq/dataverse/Dataset.java
@@ -43,6 +43,8 @@
  * @author skraffmiller
  */
 @NamedQueries({
+    @NamedQuery(name = "Dataset.findById",
+                query = "SELECT o FROM Dataset o LEFT JOIN FETCH o.files WHERE o.id=:id"),
     @NamedQuery(name = "Dataset.findIdStale",
                 query = "SELECT d.id FROM Dataset d WHERE d.indexTime is NULL OR d.indexTime < d.modificationTime"),
     @NamedQuery(name = "Dataset.findIdStalePermission",
diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java b/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java
index aae7d11b90f..160c605bb7f 100644
--- a/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java
+++ b/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java
@@ -1881,7 +1881,7 @@ private String init(boolean initFull) {
         }
         if (this.getId() != null) {
             // Set Working Version and Dataset by Dataset Id and Version
-            dataset = datasetService.find(this.getId());
+            dataset = datasetService.findDeep(this.getId());
             if (dataset == null) {
                 logger.warning("No such dataset: "+dataset);
                 return permissionsWrapper.notFound();
@@ -1980,8 +1980,8 @@ private String init(boolean initFull) {
             } else {
                 // an attempt to retrieve both the filemetadatas and datafiles early on, so that
                 // we don't have to do so later (possibly, many more times than necessary):
-                AuthenticatedUser au = session.getUser() instanceof AuthenticatedUser ? (AuthenticatedUser) session.getUser() : null;
-                datafileService.findFileMetadataOptimizedExperimental(dataset, workingVersion, au);
+                //AuthenticatedUser au = session.getUser() instanceof AuthenticatedUser ? (AuthenticatedUser) session.getUser() : null;
+                //datafileService.findFileMetadataOptimizedExperimental(dataset, workingVersion, au);
             }
             // This will default to all the files in the version, if the search term
             // parameter hasn't been specified yet:
@@ -2838,7 +2838,7 @@ public String refresh() {
         }
         if (dataset.getId() != null) {
             //retrieveDatasetVersionResponse = datasetVersionService.retrieveDatasetVersionById(dataset.getId(), version);
-            dataset = datasetService.find(dataset.getId());
+            dataset = datasetService.findDeep(dataset.getId());
             retrieveDatasetVersionResponse = datasetVersionService.selectRequestedVersion(dataset.getVersions(), version);
         } else if (versionId != null) {
             retrieveDatasetVersionResponse = datasetVersionService.retrieveDatasetVersionByVersionId(versionId);
@@ -2867,8 +2867,8 @@
         }
         if (readOnly) {
-            AuthenticatedUser au = session.getUser() instanceof AuthenticatedUser ?
(AuthenticatedUser) session.getUser() : null; - datafileService.findFileMetadataOptimizedExperimental(dataset, workingVersion, au); + //AuthenticatedUser au = session.getUser() instanceof AuthenticatedUser ? (AuthenticatedUser) session.getUser() : null; + //datafileService.findFileMetadataOptimizedExperimental(dataset, workingVersion, au); } fileMetadatasSearch = selectFileMetadatasForDisplay(); diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DatasetServiceBean.java index bf36fb469bd..418ee8d0051 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetServiceBean.java @@ -38,6 +38,7 @@ import javax.ejb.TransactionAttributeType; import javax.inject.Named; import javax.persistence.EntityManager; +import javax.persistence.LockModeType; import javax.persistence.NoResultException; import javax.persistence.PersistenceContext; import javax.persistence.Query; @@ -105,6 +106,28 @@ public Dataset find(Object pk) { return em.find(Dataset.class, pk); } + public Dataset findDeep(Object pk) { + return (Dataset) em.createNamedQuery("Dataset.findById") + .setParameter("id", pk) + .setHint("eclipselink.left-join-fetch", "o.files.ingestRequest") + .setHint("eclipselink.left-join-fetch", "o.files.thumbnailForDataset") + .setHint("eclipselink.left-join-fetch", "o.files.dataTables") + .setHint("eclipselink.left-join-fetch", "o.files.auxiliaryFiles") + .setHint("eclipselink.left-join-fetch", "o.files.ingestReports") + .setHint("eclipselink.left-join-fetch", "o.files.dataFileTags") + .setHint("eclipselink.left-join-fetch", "o.files.fileMetadatas") + .setHint("eclipselink.left-join-fetch", "o.files.guestbookResponses") + .setHint("eclipselink.left-join-fetch", "o.files.embargo") + .setHint("eclipselink.left-join-fetch", "o.files.fileAccessRequests") + .setHint("eclipselink.left-join-fetch", "o.files.owner") + .setHint("eclipselink.left-join-fetch", "o.files.releaseUser") + .setHint("eclipselink.left-join-fetch", "o.files.creator") + .setHint("eclipselink.left-join-fetch", "o.files.alternativePersistentIndentifiers") + .setHint("eclipselink.left-join-fetch", "o.files.roleAssignments") + .setLockMode(LockModeType.NONE) + .getSingleResult(); + } + public List findByOwnerId(Long ownerId) { return findByOwnerId(ownerId, false); } From 87c7383c646de0756503cc6116e13f3d47f0ad31 Mon Sep 17 00:00:00 2001 From: Leonid Andreev Date: Thu, 11 May 2023 13:03:07 -0400 Subject: [PATCH 0143/1525] More fixes for the command. 
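(Editorial sketch, not author text: the accounting this commit converges on,
assuming getRemainingQuotaInBytes() is derived from the stored allocation
minus the running usage total.)

    long fileSize = unZippedTempFile.length();                   // bytes actually written
    quota.setTotalUsageInBytes(quota.getTotalUsageInBytes() + fileSize);
    Long remaining = quota.getRemainingQuotaInBytes();           // shrinks accordingly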
#9361 --- .../impl/CreateNewDataFilesCommand.java | 19 ++++++++----------- 1 file changed, 8 insertions(+), 11 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateNewDataFilesCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateNewDataFilesCommand.java index b7ceddcd8bf..6f582a4c015 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateNewDataFilesCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateNewDataFilesCommand.java @@ -118,7 +118,6 @@ public CreateNewDataFilesCommand(DataverseRequest aRequest, DatasetVersion versi @Override public CreateDataFileResult execute(CommandContext ctxt) throws CommandException { - logger.info("entering command.execute();"); List datafiles = new ArrayList<>(); //When there is no checksum/checksumtype being sent (normal upload, needs to be calculated), set the type to the current default @@ -135,8 +134,6 @@ public CreateDataFileResult execute(CommandContext ctxt) throws CommandException Long storageQuotaLimit = null; if (ctxt.systemConfig().isStorageQuotasEnforced()) { - //storageQuotaLimit = ctxt.files().getClass()...; - //UserStorageQuota quota = ctxt.files().getUserStorageQuota(super.getRequest().getAuthenticatedUser(), this.version.getDataset()); if (quota != null) { storageQuotaLimit = quota.getRemainingQuotaInBytes(); } @@ -220,9 +217,11 @@ public CreateDataFileResult execute(CommandContext ctxt) throws CommandException } DataFile datafile = null; + long fileSize = 0L; try { uncompressedIn = new GZIPInputStream(new FileInputStream(tempFile.toFile())); File unZippedTempFile = saveInputStreamInTempFile(uncompressedIn, fileSizeLimit, storageQuotaLimit); + fileSize = unZippedTempFile.length(); datafile = FileUtil.createSingleDataFile(version, unZippedTempFile, finalFileName, MIME_TYPE_UNDETERMINED_DEFAULT, ctxt.systemConfig().getFileFixityChecksumAlgorithm()); } catch (IOException | FileExceedsMaxSizeException | FileExceedsStorageQuotaException ioex) { // it looks like we simply skip the file silently, if its uncompressed size @@ -253,7 +252,7 @@ public CreateDataFileResult execute(CommandContext ctxt) throws CommandException datafiles.add(datafile); // Update quota if present if (quota != null) { - quota.setTotalUsageInBytes(quota.getTotalUsageInBytes() - datafile.getFilesize()); + quota.setTotalUsageInBytes(quota.getTotalUsageInBytes() + fileSize); } return CreateDataFileResult.success(fileName, finalType, datafiles); } @@ -539,7 +538,7 @@ public CreateDataFileResult execute(CommandContext ctxt) throws CommandException logger.severe("Processing of zipped shapefile failed."); return CreateDataFileResult.error(fileName, finalType); } - Long storageQuotaLimitForRezippedFiles = storageQuotaLimit; + long combinedRezippedFileSize = 0L; try { @@ -551,7 +550,7 @@ public CreateDataFileResult execute(CommandContext ctxt) throws CommandException continue; } - File unZippedShapeTempFile = saveInputStreamInTempFile(finalFileInputStream, fileSizeLimit, storageQuotaLimitForRezippedFiles); + File unZippedShapeTempFile = saveInputStreamInTempFile(finalFileInputStream, fileSizeLimit, storageQuotaLimit != null ? 
storageQuotaLimit - combinedRezippedFileSize : null); DataFile new_datafile = FileUtil.createSingleDataFile(version, unZippedShapeTempFile, finalFile.getName(), finalType, ctxt.systemConfig().getFileFixityChecksumAlgorithm()); String directoryName = null; @@ -569,10 +568,8 @@ public CreateDataFileResult execute(CommandContext ctxt) throws CommandException } if (new_datafile != null) { datafiles.add(new_datafile); + combinedRezippedFileSize += unZippedShapeTempFile.length(); // todo: can this new_datafile be null? - if (storageQuotaLimitForRezippedFiles != null) { - storageQuotaLimitForRezippedFiles = storageQuotaLimitForRezippedFiles - new_datafile.getFilesize(); - } } else { logger.severe("Could not add part of rezipped shapefile. new_datafile was null: " + finalFile.getName()); } @@ -615,7 +612,7 @@ public CreateDataFileResult execute(CommandContext ctxt) throws CommandException } // update the quota object: if (quota != null) { - quota.setTotalUsageInBytes(storageQuotaLimitForRezippedFiles); + quota.setTotalUsageInBytes(quota.getTotalUsageInBytes() + combinedRezippedFileSize); } return CreateDataFileResult.success(fileName, finalType, datafiles); } else { @@ -686,7 +683,7 @@ public CreateDataFileResult execute(CommandContext ctxt) throws CommandException // Update quota (may not be necessary in the context of direct upload - ?) if (quota != null) { - quota.setTotalUsageInBytes(quota.getTotalUsageInBytes() - datafile.getFilesize()); + quota.setTotalUsageInBytes(quota.getTotalUsageInBytes() + fileSize); } return CreateDataFileResult.success(fileName, finalType, datafiles); } From e3751bd32bded7aa1ea6b387979259d1fc25f342 Mon Sep 17 00:00:00 2001 From: Eryk Kulikowski Date: Thu, 11 May 2023 19:08:00 +0200 Subject: [PATCH 0144/1525] attempt at fixing the integration test --- .../java/edu/harvard/iq/dataverse/api/SearchIT.java | 13 ++++++++++--- 1 file changed, 10 insertions(+), 3 deletions(-) diff --git a/src/test/java/edu/harvard/iq/dataverse/api/SearchIT.java b/src/test/java/edu/harvard/iq/dataverse/api/SearchIT.java index 99e9409e3d8..f3def90a005 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/SearchIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/SearchIT.java @@ -747,6 +747,7 @@ public void testIdentifier() { System.out.println("identifier: " + identifier); String searchPart = identifier.replace("FK2/", ""); + UtilIT.sleepForReindex(String.valueOf(datasetId), apiToken, 5); Response searchUnpublished = UtilIT.search(searchPart, apiToken); searchUnpublished.prettyPrint(); searchUnpublished.then().assertThat() @@ -762,7 +763,7 @@ public void testIdentifier() { .statusCode(OK.getStatusCode()); searchPart = identifier.replace("FK2/", ""); - UtilIT.sleepForReindex(identifier, apiToken, 5); + UtilIT.sleepForReindex(String.valueOf(datasetId), apiToken, 5); Response searchTargeted = UtilIT.search("dsPersistentId:" + searchPart, apiToken); searchTargeted.prettyPrint(); searchTargeted.then().assertThat() @@ -1007,25 +1008,29 @@ public void testSubtreePermissions() { // TODO: investigate if this is a bug that nothing was found. 
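// [illustrative note, not part of the patch] Each added sleepForReindex(id,
// token, 5) call waits for the dataset to be re-indexed before the next search
// assertion runs (the trailing 5 reads like an upper bound in seconds);
// without it the assertions can race the Solr indexer.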
.body("data.total_count", CoreMatchers.equalTo(0)); + UtilIT.sleepForReindex(String.valueOf(datasetId), apiToken, 5); Response searchUnpublishedRootSubtreeForDataset = UtilIT.search(identifier.replace("FK2/", ""), apiToken, "&subtree=root"); searchUnpublishedRootSubtreeForDataset.prettyPrint(); searchUnpublishedRootSubtreeForDataset.then().assertThat() .statusCode(OK.getStatusCode()) .body("data.total_count", CoreMatchers.equalTo(1)); + UtilIT.sleepForReindex(String.valueOf(datasetId), apiToken, 5); Response searchUnpublishedRootSubtreeForDatasetNoAPI = UtilIT.search(identifier.replace("FK2/", ""), null, "&subtree=root"); searchUnpublishedRootSubtreeForDatasetNoAPI.prettyPrint(); searchUnpublishedRootSubtreeForDatasetNoAPI.then().assertThat() .statusCode(OK.getStatusCode()) // TODO: investigate if this is a bug that nothing was found. .body("data.total_count", CoreMatchers.equalTo(0)); - + + UtilIT.sleepForReindex(String.valueOf(datasetId), apiToken, 5); Response searchUnpublishedNoSubtreeForDataset = UtilIT.search(identifier.replace("FK2/", ""), apiToken, ""); searchUnpublishedNoSubtreeForDataset.prettyPrint(); searchUnpublishedNoSubtreeForDataset.then().assertThat() .statusCode(OK.getStatusCode()) .body("data.total_count", CoreMatchers.equalTo(1)); + UtilIT.sleepForReindex(String.valueOf(datasetId), apiToken, 5); Response searchUnpublishedNoSubtreeForDatasetNoAPI = UtilIT.search(identifier.replace("FK2/", ""), null, ""); searchUnpublishedNoSubtreeForDatasetNoAPI.prettyPrint(); searchUnpublishedNoSubtreeForDatasetNoAPI.then().assertThat() @@ -1075,12 +1080,14 @@ public void testSubtreePermissions() { .statusCode(OK.getStatusCode()) .body("data.total_count", CoreMatchers.equalTo(2)); + UtilIT.sleepForReindex(String.valueOf(datasetId), apiToken, 5); Response searchPublishedRootSubtreeForDataset = UtilIT.search(identifier.replace("FK2/", ""), apiToken, "&subtree=root"); searchPublishedRootSubtreeForDataset.prettyPrint(); searchPublishedRootSubtreeForDataset.then().assertThat() .statusCode(OK.getStatusCode()) .body("data.total_count", CoreMatchers.equalTo(1)); - + + UtilIT.sleepForReindex(String.valueOf(datasetId), apiToken, 5); Response searchPublishedRootSubtreeForDatasetNoAPI = UtilIT.search(identifier.replace("FK2/", ""), null, "&subtree=root"); searchPublishedRootSubtreeForDatasetNoAPI.prettyPrint(); searchPublishedRootSubtreeForDatasetNoAPI.then().assertThat() From dbd97ff1c9c7c19ec56270fdfcef7e11056a7778 Mon Sep 17 00:00:00 2001 From: lubitchv Date: Thu, 11 May 2023 16:38:49 -0400 Subject: [PATCH 0145/1525] develop + title --- .../api/imports/ImportDDIServiceBean.java | 33 +- .../dataverse/export/ddi/DdiExportUtil.java | 481 +++++++++++------- .../export/openaire/OpenAireExportUtil.java | 243 +++------ 3 files changed, 393 insertions(+), 364 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/api/imports/ImportDDIServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/api/imports/ImportDDIServiceBean.java index 458803e0c92..bafd7267acb 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/imports/ImportDDIServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/imports/ImportDDIServiceBean.java @@ -1266,24 +1266,26 @@ private void parseVersionNumber(DatasetVersionDTO dvDTO, String versionNumber) { } - private void processSerStmt(XMLStreamReader xmlr, MetadataBlockDTO citation) throws XMLStreamException { - FieldDTO seriesName=null; - FieldDTO seriesInformation=null; - for (int event = xmlr.next(); event != XMLStreamConstants.END_DOCUMENT; event = xmlr.next()) 
{ + private void processSerStmt(XMLStreamReader xmlr, MetadataBlockDTO citation) throws XMLStreamException { + FieldDTO seriesInformation = null; + FieldDTO seriesName = null; + for (int event = xmlr.next(); event != XMLStreamConstants.END_DOCUMENT; event = xmlr.next()) { if (event == XMLStreamConstants.START_ELEMENT) { + if (xmlr.getLocalName().equals("serInfo")) { + seriesInformation = FieldDTO.createPrimitiveFieldDTO("seriesInformation", parseText(xmlr)); + } if (xmlr.getLocalName().equals("serName")) { - seriesName = FieldDTO.createPrimitiveFieldDTO("seriesName", parseText(xmlr)); - - } else if (xmlr.getLocalName().equals("serInfo")) { - seriesInformation=FieldDTO.createPrimitiveFieldDTO("seriesInformation", parseText(xmlr) ); + seriesName = FieldDTO.createPrimitiveFieldDTO("seriesName", parseText(xmlr)); } } else if (event == XMLStreamConstants.END_ELEMENT) { if (xmlr.getLocalName().equals("serStmt")) { - citation.getFields().add(FieldDTO.createCompoundFieldDTO("series",seriesName,seriesInformation )); + if (seriesInformation != null || seriesName != null) { + citation.addField(FieldDTO.createMultipleCompoundFieldDTO("series", seriesName, seriesInformation )); + } return; } } - } + } } private void processDistStmt(XMLStreamReader xmlr, MetadataBlockDTO citation) throws XMLStreamException { @@ -1337,7 +1339,6 @@ private void processProdStmt(XMLStreamReader xmlr, MetadataBlockDTO citation) th List> producers = new ArrayList<>(); List> grants = new ArrayList<>(); List> software = new ArrayList<>(); - List prodPlac = new ArrayList<>(); for (int event = xmlr.next(); event != XMLStreamConstants.END_DOCUMENT; event = xmlr.next()) { if (event == XMLStreamConstants.START_ELEMENT) { @@ -1353,7 +1354,9 @@ private void processProdStmt(XMLStreamReader xmlr, MetadataBlockDTO citation) th } else if (xmlr.getLocalName().equals("prodDate")) { citation.getFields().add(FieldDTO.createPrimitiveFieldDTO("productionDate", parseDate(xmlr, "prodDate"))); } else if (xmlr.getLocalName().equals("prodPlac")) { + List prodPlac = new ArrayList<>(); prodPlac.add(parseText(xmlr, "prodPlac")); + citation.getFields().add(FieldDTO.createMultiplePrimitiveFieldDTO(DatasetFieldConstant.productionPlace, prodPlac)); } else if (xmlr.getLocalName().equals("software")) { HashSet set = new HashSet<>(); addToSet(set,"softwareVersion", xmlr.getAttributeValue(null, "version")); @@ -1386,9 +1389,6 @@ private void processProdStmt(XMLStreamReader xmlr, MetadataBlockDTO citation) th if (producers.size()>0) { citation.getFields().add(FieldDTO.createMultipleCompoundFieldDTO("producer", producers)); } - if (prodPlac.size() > 0) { - citation.getFields().add(FieldDTO.createMultiplePrimitiveFieldDTO(DatasetFieldConstant.productionPlace, prodPlac)); - } return; } } @@ -1437,8 +1437,9 @@ private void processTitlStmt(XMLStreamReader xmlr, DatasetDTO datasetDTO) throws if (otherIds.size()>0) { citation.addField(FieldDTO.createMultipleCompoundFieldDTO("otherId", otherIds)); } - if (altTitles.size()>0) { - citation.addField(FieldDTO.createMultiplePrimitiveFieldDTO("alternativeTitle", altTitles)); + if (!altTitles.isEmpty()) { + FieldDTO field = FieldDTO.createMultiplePrimitiveFieldDTO(DatasetFieldConstant.alternativeTitle, altTitles); + citation.getFields().add(field); } return; } diff --git a/src/main/java/edu/harvard/iq/dataverse/export/ddi/DdiExportUtil.java b/src/main/java/edu/harvard/iq/dataverse/export/ddi/DdiExportUtil.java index 4ea90ea6199..a647c2a6f2a 100644 --- a/src/main/java/edu/harvard/iq/dataverse/export/ddi/DdiExportUtil.java +++ 
b/src/main/java/edu/harvard/iq/dataverse/export/ddi/DdiExportUtil.java @@ -30,6 +30,7 @@ import static edu.harvard.iq.dataverse.export.DDIExportServiceBean.NOTE_TYPE_TAG; import static edu.harvard.iq.dataverse.export.DDIExportServiceBean.NOTE_TYPE_UNF; import edu.harvard.iq.dataverse.export.DDIExporter; +import edu.harvard.iq.dataverse.pidproviders.PidUtil; import edu.harvard.iq.dataverse.settings.SettingsServiceBean; @@ -93,7 +94,6 @@ public class DdiExportUtil { public static final String CITATION_BLOCK_NAME = "citation"; public static String datasetDtoAsJson2ddi(String datasetDtoAsJson) { - logger.fine(JsonUtil.prettyPrint(datasetDtoAsJson)); Gson gson = new Gson(); DatasetDTO datasetDto = gson.fromJson(datasetDtoAsJson, DatasetDTO.class); try { @@ -181,7 +181,7 @@ private static void createStdyDscr(XMLStreamWriter xmlw, DatasetDTO datasetDto) String pidUri = pid; //Some tests don't send real PIDs - don't try to get their URL form if(!pidUri.equals("null:null/null")) { - pidUri= new GlobalId(persistentProtocol + ":" + persistentAuthority + "/" + persistentId).toURL().toString(); + pidUri= PidUtil.parseAsGlobalID(persistentProtocol, persistentAuthority, persistentId).asURL(); } // The "persistentAgency" tag is used for the "agency" attribute of the // ddi section; back in the DVN3 days we used "handle" and "DOI" @@ -202,12 +202,12 @@ private static void createStdyDscr(XMLStreamWriter xmlw, DatasetDTO datasetDto) writeFullElement(xmlw, "titl", dto2Primitive(version, DatasetFieldConstant.title), datasetDto.getMetadataLanguage()); writeFullElement(xmlw, "subTitl", dto2Primitive(version, DatasetFieldConstant.subTitle)); - + //writeFullElement(xmlw, "altTitl", dto2Primitive(version, DatasetFieldConstant.alternativeTitle)); FieldDTO altField = dto2FieldDTO( version, DatasetFieldConstant.alternativeTitle, "citation" ); if (altField != null) { writeMultipleElement(xmlw, "altTitl", altField, datasetDto.getMetadataLanguage()); } - + xmlw.writeStartElement("IDNo"); writeAttribute(xmlw, "agency", persistentAgency); @@ -239,9 +239,11 @@ private static void createStdyDscr(XMLStreamWriter xmlw, DatasetDTO datasetDto) } writeDistributorsElement(xmlw, version, datasetDto.getMetadataLanguage()); writeContactsElement(xmlw, version); - writeFullElement(xmlw, "distDate", dto2Primitive(version, DatasetFieldConstant.distributionDate)); + /* per SCHEMA, depositr comes before depDate! - L.A. */ writeFullElement(xmlw, "depositr", dto2Primitive(version, DatasetFieldConstant.depositor)); + /* ... and depDate comes before distDate - L.A. 
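(so the distStmt children are emitted in schema order: distrbtr, contact,
 depositr, depDate, distDate - matching the write calls around this comment)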
*/ writeFullElement(xmlw, "depDate", dto2Primitive(version, DatasetFieldConstant.dateOfDeposit)); + writeFullElement(xmlw, "distDate", dto2Primitive(version, DatasetFieldConstant.distributionDate)); xmlw.writeEndElement(); // diststmt @@ -294,23 +296,16 @@ private static void writeOtherStudyMaterial(XMLStreamWriter xmlw , DatasetVersio xmlw.writeEndElement(); //othrStdyMat } + /* + + + + + + */ private static void writeDataAccess(XMLStreamWriter xmlw , DatasetVersionDTO version) throws XMLStreamException { xmlw.writeStartElement("dataAccs"); - if (version.getTermsOfUse() != null && !version.getTermsOfUse().trim().equals("")) { - xmlw.writeStartElement("notes"); - writeAttribute(xmlw, "type", NOTE_TYPE_TERMS_OF_USE); - writeAttribute(xmlw, "level", LEVEL_DV); - xmlw.writeCharacters(version.getTermsOfUse()); - xmlw.writeEndElement(); //notes - } - if (version.getTermsOfAccess() != null && !version.getTermsOfAccess().trim().equals("")) { - xmlw.writeStartElement("notes"); - writeAttribute(xmlw, "type", NOTE_TYPE_TERMS_OF_ACCESS); - writeAttribute(xmlw, "level", LEVEL_DV); - xmlw.writeCharacters(version.getTermsOfAccess()); - xmlw.writeEndElement(); //notes - } - + xmlw.writeStartElement("setAvail"); writeFullElement(xmlw, "accsPlac", version.getDataAccessPlace()); writeFullElement(xmlw, "origArch", version.getOriginalArchive()); @@ -318,6 +313,7 @@ private static void writeDataAccess(XMLStreamWriter xmlw , DatasetVersionDTO ver writeFullElement(xmlw, "collSize", version.getSizeOfCollection()); writeFullElement(xmlw, "complete", version.getStudyCompletion()); xmlw.writeEndElement(); //setAvail + xmlw.writeStartElement("useStmt"); writeFullElement(xmlw, "confDec", version.getConfidentialityDeclaration()); writeFullElement(xmlw, "specPerm", version.getSpecialPermissions()); @@ -328,6 +324,15 @@ private static void writeDataAccess(XMLStreamWriter xmlw , DatasetVersionDTO ver writeFullElement(xmlw, "conditions", version.getConditions()); writeFullElement(xmlw, "disclaimer", version.getDisclaimer()); xmlw.writeEndElement(); //useStmt + + /* any s: */ + if (version.getTermsOfAccess() != null && !version.getTermsOfAccess().trim().equals("")) { + xmlw.writeStartElement("notes"); + writeAttribute(xmlw, "type", NOTE_TYPE_TERMS_OF_ACCESS); + writeAttribute(xmlw, "level", LEVEL_DV); + xmlw.writeCharacters(version.getTermsOfAccess()); + xmlw.writeEndElement(); //notes + } xmlw.writeEndElement(); //dataAccs } @@ -388,141 +393,222 @@ private static void writeVersionStatement(XMLStreamWriter xmlw, DatasetVersionDT xmlw.writeEndElement(); // verStmt } + /* From the DDI 2.5 schema: + + + + + + + + + + + + + */ private static void writeSummaryDescriptionElement(XMLStreamWriter xmlw, DatasetVersionDTO datasetVersionDTO, String lang) throws XMLStreamException { xmlw.writeStartElement("sumDscr"); + FieldDTO timePeriodCoveredDTO = null; + FieldDTO dateOfCollectionDTO = null; + FieldDTO geographicCoverageDTO = null; + FieldDTO geographicBoundingBoxDTO = null; + FieldDTO unitOfAnalysisDTO = null; + FieldDTO universeDTO = null; + FieldDTO kindOfDataDTO = null; + for (Map.Entry entry : datasetVersionDTO.getMetadataBlocks().entrySet()) { String key = entry.getKey(); MetadataBlockDTO value = entry.getValue(); + if ("citation".equals(key)) { - Integer per = 0; - Integer coll = 0; for (FieldDTO fieldDTO : value.getFields()) { if (DatasetFieldConstant.timePeriodCovered.equals(fieldDTO.getTypeName())) { - String dateValStart = ""; - String dateValEnd = ""; - for (HashSet foo : fieldDTO.getMultipleCompound()) { - per++; - for 
(Iterator iterator = foo.iterator(); iterator.hasNext();) { - FieldDTO next = iterator.next(); - if (DatasetFieldConstant.timePeriodCoveredStart.equals(next.getTypeName())) { - dateValStart = next.getSinglePrimitive(); - } - if (DatasetFieldConstant.timePeriodCoveredEnd.equals(next.getTypeName())) { - dateValEnd = next.getSinglePrimitive(); - } - } - if (!dateValStart.isEmpty()) { - writeDateElement(xmlw, "timePrd", "P"+ per.toString(), "start", dateValStart ); - } - if (!dateValEnd.isEmpty()) { - writeDateElement(xmlw, "timePrd", "P"+ per.toString(), "end", dateValEnd ); - } - } + timePeriodCoveredDTO = fieldDTO; } + if (DatasetFieldConstant.dateOfCollection.equals(fieldDTO.getTypeName())) { - String dateValStart = ""; - String dateValEnd = ""; - for (HashSet foo : fieldDTO.getMultipleCompound()) { - coll++; - for (Iterator iterator = foo.iterator(); iterator.hasNext();) { - FieldDTO next = iterator.next(); - if (DatasetFieldConstant.dateOfCollectionStart.equals(next.getTypeName())) { - dateValStart = next.getSinglePrimitive(); - } - if (DatasetFieldConstant.dateOfCollectionEnd.equals(next.getTypeName())) { - dateValEnd = next.getSinglePrimitive(); - } - } - if (!dateValStart.isEmpty()) { - writeDateElement(xmlw, "collDate", "P"+ coll.toString(), "start", dateValStart ); - } - if (!dateValEnd.isEmpty()) { - writeDateElement(xmlw, "collDate", "P"+ coll.toString(), "end", dateValEnd ); - } - } + dateOfCollectionDTO = fieldDTO; } + if (DatasetFieldConstant.kindOfData.equals(fieldDTO.getTypeName())) { - writeMultipleElement(xmlw, "dataKind", fieldDTO, lang); + kindOfDataDTO = fieldDTO; } } } - - if("geospatial".equals(key)){ + + if ("geospatial".equals(key)) { for (FieldDTO fieldDTO : value.getFields()) { if (DatasetFieldConstant.geographicCoverage.equals(fieldDTO.getTypeName())) { - - for (HashSet foo : fieldDTO.getMultipleCompound()) { - HashMap geoMap = new HashMap<>(); - for (Iterator iterator = foo.iterator(); iterator.hasNext();) { - FieldDTO next = iterator.next(); - if (DatasetFieldConstant.country.equals(next.getTypeName())) { - geoMap.put("country", next.getSinglePrimitive()); - } - if (DatasetFieldConstant.city.equals(next.getTypeName())) { - geoMap.put("city", next.getSinglePrimitive()); - } - if (DatasetFieldConstant.state.equals(next.getTypeName())) { - geoMap.put("state", next.getSinglePrimitive()); - } - if (DatasetFieldConstant.otherGeographicCoverage.equals(next.getTypeName())) { - geoMap.put("otherGeographicCoverage", next.getSinglePrimitive()); - } - } - - if (geoMap.get("country") != null) { - writeFullElement(xmlw, "nation", geoMap.get("country")); - } - if (geoMap.get("city") != null) { - writeFullElement(xmlw, "geogCover", geoMap.get("city")); - } - if (geoMap.get("state") != null) { - writeFullElement(xmlw, "geogCover", geoMap.get("state")); - } - if (geoMap.get("otherGeographicCoverage") != null) { - writeFullElement(xmlw, "geogCover", geoMap.get("otherGeographicCoverage")); - } - - } + geographicCoverageDTO = fieldDTO; } if (DatasetFieldConstant.geographicBoundingBox.equals(fieldDTO.getTypeName())) { - for (HashSet foo : fieldDTO.getMultipleCompound()) { - xmlw.writeStartElement("geoBndBox"); - for (Iterator iterator = foo.iterator(); iterator.hasNext();) { - FieldDTO next = iterator.next(); - if (DatasetFieldConstant.westLongitude.equals(next.getTypeName())) { - writeFullElement(xmlw, "westBL", next.getSinglePrimitive()); - } - if (DatasetFieldConstant.eastLongitude.equals(next.getTypeName())) { - writeFullElement(xmlw, "eastBL", next.getSinglePrimitive()); - } - 
if (DatasetFieldConstant.northLatitude.equals(next.getTypeName())) { - writeFullElement(xmlw, "northBL", next.getSinglePrimitive()); - } - if (DatasetFieldConstant.southLatitude.equals(next.getTypeName())) { - writeFullElement(xmlw, "southBL", next.getSinglePrimitive()); - } - - } - xmlw.writeEndElement(); - } + geographicBoundingBoxDTO = fieldDTO; } } - writeFullElementList(xmlw, "geogUnit", dto2PrimitiveList(datasetVersionDTO, DatasetFieldConstant.geographicUnit)); } - if("socialscience".equals(key)){ + if ("socialscience".equals(key)) { for (FieldDTO fieldDTO : value.getFields()) { if (DatasetFieldConstant.universe.equals(fieldDTO.getTypeName())) { - writeMultipleElement(xmlw, "universe", fieldDTO, lang); + universeDTO = fieldDTO; } if (DatasetFieldConstant.unitOfAnalysis.equals(fieldDTO.getTypeName())) { - writeI18NElementList(xmlw, "anlyUnit", fieldDTO.getMultipleVocab(), "unitOfAnalysis", fieldDTO.getTypeClass(), "socialscience", lang); + unitOfAnalysisDTO = fieldDTO; + } + } + } + } + /* Finally, we can write the fields we have collected, in the correct order: -L.A.*/ + + if (timePeriodCoveredDTO != null) { + String dateValStart = ""; + String dateValEnd = ""; + Integer per = 0; + for (HashSet foo : timePeriodCoveredDTO.getMultipleCompound()) { + per++; + for (Iterator iterator = foo.iterator(); iterator.hasNext();) { + FieldDTO next = iterator.next(); + if (DatasetFieldConstant.timePeriodCoveredStart.equals(next.getTypeName())) { + dateValStart = next.getSinglePrimitive(); + } + if (DatasetFieldConstant.timePeriodCoveredEnd.equals(next.getTypeName())) { + dateValEnd = next.getSinglePrimitive(); + } + } + if (!dateValStart.isEmpty()) { + writeDateElement(xmlw, "timePrd", "P" + per.toString(), "start", dateValStart); + } + if (!dateValEnd.isEmpty()) { + writeDateElement(xmlw, "timePrd", "P" + per.toString(), "end", dateValEnd); + } + } + } + + if (dateOfCollectionDTO != null) { + String dateValStart = ""; + String dateValEnd = ""; + Integer coll = 0; + for (HashSet foo : dateOfCollectionDTO.getMultipleCompound()) { + coll++; + for (Iterator iterator = foo.iterator(); iterator.hasNext();) { + FieldDTO next = iterator.next(); + if (DatasetFieldConstant.dateOfCollectionStart.equals(next.getTypeName())) { + dateValStart = next.getSinglePrimitive(); + } + if (DatasetFieldConstant.dateOfCollectionEnd.equals(next.getTypeName())) { + dateValEnd = next.getSinglePrimitive(); } } + if (!dateValStart.isEmpty()) { + writeDateElement(xmlw, "collDate", "P" + coll.toString(), "start", dateValStart); + } + if (!dateValEnd.isEmpty()) { + writeDateElement(xmlw, "collDate", "P" + coll.toString(), "end", dateValEnd); + } } } + + /* and come next, in that order. -L.A. 
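(that is, nation and geogCover come next: the two loops below write all the
 nation entries first, then all the geogCover entries)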
*/ + if (geographicCoverageDTO != null) { + + List nationList = new ArrayList<>(); + List geogCoverList = new ArrayList<>(); + + for (HashSet foo : geographicCoverageDTO.getMultipleCompound()) { + for (Iterator iterator = foo.iterator(); iterator.hasNext();) { + FieldDTO next = iterator.next(); + /* our "country" field maps 1:1 to the DDI "": */ + if (DatasetFieldConstant.country.equals(next.getTypeName())) { + nationList.add(next.getSinglePrimitive()); + } + /* city, state and otherGeographicCoverage all exported as "": */ + if (DatasetFieldConstant.city.equals(next.getTypeName()) + || DatasetFieldConstant.state.equals(next.getTypeName()) + || DatasetFieldConstant.otherGeographicCoverage.equals(next.getTypeName())) { + geogCoverList.add(next.getSinglePrimitive()); + } + } + } + + /** + * And now we can write all the fields encountered, first the + * "" entries, then all the "" ones: + */ + for (String nationEntry : nationList) { + writeFullElement(xmlw, "nation", nationEntry); + } + for (String geogCoverEntry : geogCoverList) { + writeFullElement(xmlw, "geogCover", geogCoverEntry); + } + } + + writeFullElementList(xmlw, "geogUnit", dto2PrimitiveList(datasetVersionDTO, DatasetFieldConstant.geographicUnit)); + + /* Only 1 geoBndBox is allowed in the DDI. + So, I'm just going to arbitrarily use the first one, and ignore the rest! -L.A. */ + if (geographicBoundingBoxDTO != null) { + HashSet bndBoxSet = geographicBoundingBoxDTO.getMultipleCompound().get(0); + xmlw.writeStartElement("geoBndBox"); + HashMap geoBndBoxMap = new HashMap<>(); + for (FieldDTO next : bndBoxSet) { + if (DatasetFieldConstant.westLongitude.equals(next.getTypeName())) { + geoBndBoxMap.put("westBL", next.getSinglePrimitive()); + } + if (DatasetFieldConstant.eastLongitude.equals(next.getTypeName())) { + geoBndBoxMap.put("eastBL", next.getSinglePrimitive()); + } + if (DatasetFieldConstant.northLatitude.equals(next.getTypeName())) { + geoBndBoxMap.put("northBL", next.getSinglePrimitive()); + } + if (DatasetFieldConstant.southLatitude.equals(next.getTypeName())) { + geoBndBoxMap.put("southBL", next.getSinglePrimitive()); + } + } + + /* Once again, order is important! */ + /* + + + + + + + */ + if (geoBndBoxMap.get("westBL") != null) { + writeFullElement(xmlw, "westBL", geoBndBoxMap.get("westBL")); + } + if (geoBndBoxMap.get("eastBL") != null) { + writeFullElement(xmlw, "eastBL", geoBndBoxMap.get("eastBL")); + } + if (geoBndBoxMap.get("southBL") != null) { + writeFullElement(xmlw, "southBL", geoBndBoxMap.get("southBL")); + } + if (geoBndBoxMap.get("northBL") != null) { + writeFullElement(xmlw, "northBL", geoBndBoxMap.get("northBL")); + } + + xmlw.writeEndElement(); + } + + /* analyUnit: */ + if (unitOfAnalysisDTO != null) { + writeI18NElementList(xmlw, "anlyUnit", unitOfAnalysisDTO.getMultipleVocab(), "unitOfAnalysis", unitOfAnalysisDTO.getTypeClass(), "socialscience", lang); + + } + + /* universe: */ + if (universeDTO != null) { + writeMultipleElement(xmlw, "universe", universeDTO, lang); + } + + /* finally, any "kind of data" entries: */ + if (kindOfDataDTO != null) { + writeMultipleElement(xmlw, "dataKind", kindOfDataDTO, lang); + } + xmlw.writeEndElement(); //sumDscr } @@ -544,6 +630,29 @@ private static void writeDateElement(XMLStreamWriter xmlw, String element, Strin } + /** + * Again, is an xs:sequence - order is important and must follow + * the schema. -L.A. 
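 * (hence the reordering in this hunk: collMode and resInstru are written
 *  before the sources block, notes now precede anlyInfo, and ConOps keeps
 *  the uppercase C the schema requires)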
+ * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + */ private static void writeMethodElement(XMLStreamWriter xmlw , DatasetVersionDTO version, String lang) throws XMLStreamException{ xmlw.writeStartElement("method"); xmlw.writeStartElement("dataColl"); @@ -557,13 +666,7 @@ private static void writeMethodElement(XMLStreamWriter xmlw , DatasetVersionDTO writeI18NElement(xmlw, "deviat", version, DatasetFieldConstant.deviationsFromSampleDesign, lang); - xmlw.writeStartElement("sources"); - writeFullElementList(xmlw, "dataSrc", dto2PrimitiveList(version, DatasetFieldConstant.dataSources)); - writeI18NElement(xmlw, "srcOrig", version, DatasetFieldConstant.originOfSources, lang); - writeI18NElement(xmlw, "srcChar", version, DatasetFieldConstant.characteristicOfSources, lang); - writeI18NElement(xmlw, "srcDocu", version, DatasetFieldConstant.accessToSources, lang); - xmlw.writeEndElement(); //sources - + /* comes before : */ FieldDTO collModeFieldDTO = dto2FieldDTO(version, DatasetFieldConstant.collectionMode, "socialscience"); if (collModeFieldDTO != null) { // This field was made multiple as of 5.10 @@ -575,21 +678,33 @@ private static void writeMethodElement(XMLStreamWriter xmlw , DatasetVersionDTO writeI18NElement(xmlw, "collMode", version, DatasetFieldConstant.collectionMode, lang); } } + /* and so does : */ writeI18NElement(xmlw, "resInstru", version, DatasetFieldConstant.researchInstrument, lang); + xmlw.writeStartElement("sources"); + writeFullElementList(xmlw, "dataSrc", dto2PrimitiveList(version, DatasetFieldConstant.dataSources)); + writeI18NElement(xmlw, "srcOrig", version, DatasetFieldConstant.originOfSources, lang); + writeI18NElement(xmlw, "srcChar", version, DatasetFieldConstant.characteristicOfSources, lang); + writeI18NElement(xmlw, "srcDocu", version, DatasetFieldConstant.accessToSources, lang); + xmlw.writeEndElement(); //sources + + writeI18NElement(xmlw, "collSitu", version, DatasetFieldConstant.dataCollectionSituation, lang); writeI18NElement(xmlw, "actMin", version, DatasetFieldConstant.actionsToMinimizeLoss, lang); - writeI18NElement(xmlw, "conOps", version, DatasetFieldConstant.controlOperations, lang); + /* "" has the uppercase C: */ + writeI18NElement(xmlw, "ConOps", version, DatasetFieldConstant.controlOperations, lang); writeI18NElement(xmlw, "weight", version, DatasetFieldConstant.weighting, lang); writeI18NElement(xmlw, "cleanOps", version, DatasetFieldConstant.cleaningOperations, lang); xmlw.writeEndElement(); //dataColl + /* before : */ + writeNotesElement(xmlw, version); + xmlw.writeStartElement("anlyInfo"); //writeFullElement(xmlw, "anylInfo", dto2Primitive(version, DatasetFieldConstant.datasetLevelErrorNotes)); writeI18NElement(xmlw, "respRate", version, DatasetFieldConstant.responseRate, lang); writeI18NElement(xmlw, "EstSmpErr", version, DatasetFieldConstant.samplingErrorEstimates, lang); writeI18NElement(xmlw, "dataAppr", version, DatasetFieldConstant.otherDataAppraisal, lang); xmlw.writeEndElement(); //anlyInfo - writeNotesElement(xmlw, version); xmlw.writeEndElement();//method } @@ -852,7 +967,6 @@ private static void writeProducersElement(XMLStreamWriter xmlw, DatasetVersionDT String producerAffiliation = ""; String producerAbbreviation = ""; String producerLogo = ""; - String producerURL = ""; for (Iterator iterator = foo.iterator(); iterator.hasNext();) { FieldDTO next = iterator.next(); if (DatasetFieldConstant.producerName.equals(next.getTypeName())) { @@ -867,10 +981,6 @@ private static void 
writeProducersElement(XMLStreamWriter xmlw, DatasetVersionDT if (DatasetFieldConstant.producerLogo.equals(next.getTypeName())) { producerLogo = next.getSinglePrimitive(); } - if (DatasetFieldConstant.producerURL.equals(next.getTypeName())) { - producerURL = next.getSinglePrimitive(); - - } } if (!producerName.isEmpty()) { xmlw.writeStartElement("producer"); @@ -880,12 +990,9 @@ private static void writeProducersElement(XMLStreamWriter xmlw, DatasetVersionDT if (!producerAbbreviation.isEmpty()) { writeAttribute(xmlw, "abbr", producerAbbreviation); } - if (!producerLogo.isEmpty()) { + /*if (!producerLogo.isEmpty()) { writeAttribute(xmlw, "role", producerLogo); - } - if (!producerURL.isEmpty()) { - writeAttribute(xmlw, "URI", producerURL); - } + }*/ xmlw.writeCharacters(producerName); xmlw.writeEndElement(); //AuthEnty } @@ -896,12 +1003,10 @@ private static void writeProducersElement(XMLStreamWriter xmlw, DatasetVersionDT } } writeFullElement(xmlw, "prodDate", dto2Primitive(version, DatasetFieldConstant.productionDate)); - - FieldDTO prodPlac = dto2FieldDTO( version, DatasetFieldConstant.productionPlace, "citation" ); - if (prodPlac != null) { - writeMultipleElement(xmlw, "prodPlac", prodPlac, null); - } - + // productionPlace was made multiple as of 5.14: + // (a quick backward compatibility check was added to dto2PrimitiveList(), + // see the method for details) + writeFullElementList(xmlw, "prodPlac", dto2PrimitiveList(version, DatasetFieldConstant.productionPlace)); writeSoftwareElement(xmlw, version); writeGrantElement(xmlw, version); @@ -921,7 +1026,6 @@ private static void writeDistributorsElement(XMLStreamWriter xmlw, DatasetVersio String distributorAffiliation = ""; String distributorAbbreviation = ""; String distributorURL = ""; - String distributorLogoURL = ""; for (Iterator iterator = foo.iterator(); iterator.hasNext();) { FieldDTO next = iterator.next(); if (DatasetFieldConstant.distributorName.equals(next.getTypeName())) { @@ -936,9 +1040,6 @@ private static void writeDistributorsElement(XMLStreamWriter xmlw, DatasetVersio if (DatasetFieldConstant.distributorURL.equals(next.getTypeName())) { distributorURL = next.getSinglePrimitive(); } - if (DatasetFieldConstant.distributorLogo.equals(next.getTypeName())) { - distributorLogoURL = next.getSinglePrimitive(); - } } if (!distributorName.isEmpty()) { xmlw.writeStartElement("distrbtr"); @@ -954,9 +1055,6 @@ private static void writeDistributorsElement(XMLStreamWriter xmlw, DatasetVersio if (!distributorURL.isEmpty()) { writeAttribute(xmlw, "URI", distributorURL); } - if (!distributorLogoURL.isEmpty()) { - writeAttribute(xmlw, "role", distributorLogoURL); - } xmlw.writeCharacters(distributorName); xmlw.writeEndElement(); //AuthEnty } @@ -1000,16 +1098,33 @@ private static void writeRelPublElement(XMLStreamWriter xmlw, DatasetVersionDTO if (citation != null && !citation.trim().equals("")) { xmlw.writeStartElement("relPubl"); xmlw.writeStartElement("citation"); + /* + + + + + + + + + + + + (In other words - titlStmt is mandatory! -L.A.) 
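   (accordingly, the rewritten block below always opens titlStmt, writes the
   citation text as titl, and adds IDNo inside it only when an identifier is
   actually present)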
+ */ + xmlw.writeStartElement("titlStmt"); + writeFullElement(xmlw, "titl", citation); if (IDNo != null && !IDNo.trim().equals("")) { - xmlw.writeStartElement("titlStmt"); + xmlw.writeStartElement("IDNo"); if (IDType != null && !IDType.trim().equals("")) { - xmlw.writeAttribute("agency", IDType ); + xmlw.writeAttribute("agency", IDType); } xmlw.writeCharacters(IDNo); xmlw.writeEndElement(); //IDNo - xmlw.writeEndElement(); // titlStmt } + xmlw.writeEndElement(); // titlStmt + writeFullElement(xmlw,"biblCit",citation); xmlw.writeEndElement(); //citation @@ -1181,33 +1296,34 @@ private static void writeSeriesElement(XMLStreamWriter xmlw, DatasetVersionDTO d for (Map.Entry entry : datasetVersionDTO.getMetadataBlocks().entrySet()) { String key = entry.getKey(); MetadataBlockDTO value = entry.getValue(); - if ("citation".equals(key)) { + if ("citation".equals(key)) { for (FieldDTO fieldDTO : value.getFields()) { if (DatasetFieldConstant.series.equals(fieldDTO.getTypeName())) { - xmlw.writeStartElement("serStmt"); String seriesName = ""; String seriesInformation = ""; - Set foo = fieldDTO.getSingleCompound(); + for (HashSet foo : fieldDTO.getMultipleCompound()) { + xmlw.writeStartElement("serStmt"); for (Iterator iterator = foo.iterator(); iterator.hasNext();) { FieldDTO next = iterator.next(); if (DatasetFieldConstant.seriesName.equals(next.getTypeName())) { - seriesName = next.getSinglePrimitive(); + seriesName = next.getSinglePrimitive(); } if (DatasetFieldConstant.seriesInformation.equals(next.getTypeName())) { - seriesInformation = next.getSinglePrimitive(); + seriesInformation = next.getSinglePrimitive(); } } - if (!seriesName.isEmpty()){ - xmlw.writeStartElement("serName"); + if (!seriesName.isEmpty()) { + xmlw.writeStartElement("serName"); xmlw.writeCharacters(seriesName); - xmlw.writeEndElement(); //grantno + xmlw.writeEndElement(); //serName } - if (!seriesInformation.isEmpty()){ - xmlw.writeStartElement("serInfo"); + if (!seriesInformation.isEmpty()) { + xmlw.writeStartElement("serInfo"); xmlw.writeCharacters(seriesInformation); - xmlw.writeEndElement(); //grantno + xmlw.writeEndElement(); //serInfo } - xmlw.writeEndElement(); //serStmt + xmlw.writeEndElement(); //serStmt + } } } } @@ -1234,17 +1350,18 @@ private static void writeTargetSampleElement(XMLStreamWriter xmlw, DatasetVersio actualSize = next.getSinglePrimitive(); } } - - if (!sizeFormula.isEmpty()) { - xmlw.writeStartElement("sampleSizeFormula"); - xmlw.writeCharacters(sizeFormula); - xmlw.writeEndElement(); //sampleSizeFormula - } + /* must come before ! -L.A. 
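   (that is, sampleSize must come before sampleSizeFormula in the
   targetSampleSize sequence - hence the swapped write order below)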
*/ if (!actualSize.isEmpty()) { xmlw.writeStartElement("sampleSize"); xmlw.writeCharacters(actualSize); xmlw.writeEndElement(); //sampleSize } + if (!sizeFormula.isEmpty()) { + xmlw.writeStartElement("sampleSizeFormula"); + xmlw.writeCharacters(sizeFormula); + xmlw.writeEndElement(); //sampleSizeFormula + } + xmlw.writeEndElement(); // targetSampleSize } } @@ -1356,8 +1473,8 @@ private static void createOtherMatsFromFileMetadatas(XMLStreamWriter xmlw, List< writeAttribute(xmlw, "ID", "f" + fileMetadata.getDataFile().getId()); String dfIdentifier = fileMetadata.getDataFile().getIdentifier(); if (dfIdentifier != null && !dfIdentifier.isEmpty()){ - GlobalId globalId = new GlobalId(fileMetadata.getDataFile()); - writeAttribute(xmlw, "URI", globalId.toURL().toString()); + GlobalId globalId = fileMetadata.getDataFile().getGlobalId(); + writeAttribute(xmlw, "URI", globalId.asURL()); } else { writeAttribute(xmlw, "URI", dataverseUrl + "/api/access/datafile/" + fileMetadata.getDataFile().getId()); } @@ -1432,7 +1549,15 @@ private static List dto2PrimitiveList(DatasetVersionDTO datasetVersionDT MetadataBlockDTO value = entry.getValue(); for (FieldDTO fieldDTO : value.getFields()) { if (datasetFieldTypeName.equals(fieldDTO.getTypeName())) { - return fieldDTO.getMultiplePrimitive(); + // This hack is here to make sure the export does not blow + // up on an instance that upgraded to a Dataverse version + // where a certain primitive has been made multiple, but has + // not yet update the block. + if (fieldDTO.getMultiple() != null && fieldDTO.getMultiple()) { + return fieldDTO.getMultiplePrimitive(); + } else { + return Arrays.asList(fieldDTO.getSinglePrimitive()); + } } } } diff --git a/src/main/java/edu/harvard/iq/dataverse/export/openaire/OpenAireExportUtil.java b/src/main/java/edu/harvard/iq/dataverse/export/openaire/OpenAireExportUtil.java index 34cb7a4e138..334b18f4601 100644 --- a/src/main/java/edu/harvard/iq/dataverse/export/openaire/OpenAireExportUtil.java +++ b/src/main/java/edu/harvard/iq/dataverse/export/openaire/OpenAireExportUtil.java @@ -1,13 +1,8 @@ package edu.harvard.iq.dataverse.export.openaire; import java.io.OutputStream; -import java.util.HashMap; -import java.util.HashSet; -import java.util.Iterator; -import java.util.Map; -import java.util.Set; +import java.util.*; import java.util.logging.Logger; -import java.util.List; import javax.json.JsonObject; import javax.xml.stream.XMLOutputFactory; @@ -18,13 +13,17 @@ import com.google.gson.Gson; +import edu.harvard.iq.dataverse.DOIServiceBean; import edu.harvard.iq.dataverse.DatasetFieldConstant; import edu.harvard.iq.dataverse.GlobalId; +import edu.harvard.iq.dataverse.HandlenetServiceBean; import edu.harvard.iq.dataverse.TermsOfUseAndAccess; import edu.harvard.iq.dataverse.api.dto.DatasetDTO; import edu.harvard.iq.dataverse.api.dto.DatasetVersionDTO; import edu.harvard.iq.dataverse.api.dto.FieldDTO; import edu.harvard.iq.dataverse.api.dto.MetadataBlockDTO; +import edu.harvard.iq.dataverse.util.PersonOrOrgUtil; +import edu.harvard.iq.dataverse.pidproviders.PidUtil; import edu.harvard.iq.dataverse.util.json.JsonUtil; import java.util.regex.Matcher; import java.util.regex.Pattern; @@ -71,7 +70,7 @@ private static void createOpenAire(XMLStreamWriter xmlw, DatasetDTO datasetDto) String persistentAgency = datasetDto.getProtocol(); String persistentAuthority = datasetDto.getAuthority(); String persistentId = datasetDto.getIdentifier(); - GlobalId globalId = new GlobalId(persistentAgency, persistentAuthority, persistentId); + GlobalId 
globalId = PidUtil.parseAsGlobalID(persistentAgency, persistentAuthority, persistentId); // The sequence is revied using sample: // https://schema.datacite.org/meta/kernel-4.0/example/datacite-example-full-v4.0.xml @@ -83,7 +82,7 @@ private static void createOpenAire(XMLStreamWriter xmlw, DatasetDTO datasetDto) String language = null; // 1, Identifier (with mandatory type sub-property) (M) - writeIdentifierElement(xmlw, globalId.toURL().toString(), language); + writeIdentifierElement(xmlw, globalId.asURL(), language); // 2, Creator (with optional given name, family name, // name identifier and affiliation sub-properties) (M) @@ -191,10 +190,10 @@ public static void writeIdentifierElement(XMLStreamWriter xmlw, String identifie if (StringUtils.isNotBlank(identifier)) { Map identifier_map = new HashMap(); - if (StringUtils.containsIgnoreCase(identifier, GlobalId.DOI_RESOLVER_URL)) { + if (StringUtils.containsIgnoreCase(identifier, DOIServiceBean.DOI_RESOLVER_URL)) { identifier_map.put("identifierType", "DOI"); identifier = StringUtils.substring(identifier, identifier.indexOf("10.")); - } else if (StringUtils.containsIgnoreCase(identifier, GlobalId.HDL_RESOLVER_URL)) { + } else if (StringUtils.containsIgnoreCase(identifier, HandlenetServiceBean.HDL_RESOLVER_URL)) { identifier_map.put("identifierType", "Handle"); if (StringUtils.contains(identifier, "http")) { identifier = identifier.replace(identifier.substring(0, identifier.indexOf("/") + 2), ""); @@ -250,72 +249,26 @@ public static void writeCreatorsElement(XMLStreamWriter xmlw, DatasetVersionDTO if (StringUtils.isNotBlank(creatorName)) { creator_check = writeOpenTag(xmlw, "creators", creator_check); xmlw.writeStartElement("creator"); // - - boolean nameType_check = false; + Map creator_map = new HashMap(); - if ((StringUtils.containsIgnoreCase(nameIdentifierScheme, "orcid"))) { + JsonObject creatorObj = PersonOrOrgUtil.getPersonOrOrganization(creatorName, false, + StringUtils.containsIgnoreCase(nameIdentifierScheme, "orcid")); + + // creatorName=, + if (creatorObj.getBoolean("isPerson")) { creator_map.put("nameType", "Personal"); - nameType_check = true; - } - // ToDo - the algorithm to determine if this is a Person or Organization here - // has been abstracted into a separate - // edu.harvard.iq.dataverse.util.PersonOrOrgUtil class that could be used here - // to avoid duplication/variants of the algorithm - creatorName = Cleanup.normalize(creatorName); - // Datacite algorithm, https://github.com/IQSS/dataverse/issues/2243#issuecomment-358615313 - if (creatorName.contains(",")) { - String givenName = FirstNames.getInstance().getFirstName(creatorName); - boolean isOrganization = Organizations.getInstance().isOrganization(creatorName); - - // creatorName=, - if (givenName != null && !isOrganization) { - // givenName ok - creator_map.put("nameType", "Personal"); - nameType_check = true; - } else if (isOrganization) { - creator_map.put("nameType", "Organizational"); - nameType_check = false; - } - writeFullElement(xmlw, null, "creatorName", creator_map, creatorName, language); - - if ((nameType_check) && (!creatorName.replaceFirst(",", "").contains(","))) { - // creatorName=, - String[] fullName = creatorName.split(", "); - if (fullName.length == 2) { - givenName = fullName[1]; - String familyName = fullName[0]; - - writeFullElement(xmlw, null, "givenName", null, givenName, language); - writeFullElement(xmlw, null, "familyName", null, familyName, language); - } else { - // It's possible to get here if "Smith," is entered as an author name. 
- logger.info("Unable to write givenName and familyName based on creatorName '" + creatorName + "'."); - } - } } else { - String givenName = FirstNames.getInstance().getFirstName(creatorName); - boolean isOrganization = Organizations.getInstance().isOrganization(creatorName); - - if (givenName != null && !isOrganization) { - // givenName ok, creatorName= - creator_map.put("nameType", "Personal"); - nameType_check = true; - writeFullElement(xmlw, null, "creatorName", creator_map, creatorName, language); - - String familyName = ""; - if (givenName.length() + 1 < creatorName.length()) { - familyName = creatorName.substring(givenName.length() + 1); - } - - writeFullElement(xmlw, null, "givenName", null, givenName, language); - writeFullElement(xmlw, null, "familyName", null, familyName, language); - } else { - // default - if (isOrganization) { - creator_map.put("nameType", "Organizational"); - } - writeFullElement(xmlw, null, "creatorName", creator_map, creatorName, language); - } + creator_map.put("nameType", "Organizational"); + } + writeFullElement(xmlw, null, "creatorName", creator_map, + creatorObj.getString("fullName"), language); + if (creatorObj.containsKey("givenName")) { + writeFullElement(xmlw, null, "givenName", null, creatorObj.getString("givenName"), + language); + } + if (creatorObj.containsKey("familyName")) { + writeFullElement(xmlw, null, "familyName", null, creatorObj.getString("familyName"), + language); } if (StringUtils.isNotBlank(nameIdentifier)) { @@ -369,10 +322,34 @@ public static void writeTitlesElement(XMLStreamWriter xmlw, DatasetVersionDTO da String subtitle = dto2Primitive(datasetVersionDTO, DatasetFieldConstant.subTitle); title_check = writeTitleElement(xmlw, "Subtitle", subtitle, title_check, language); + //String alternativeTitle = dto2Primitive(datasetVersionDTO, DatasetFieldConstant.alternativeTitle); + //title_check = writeTitleElement(xmlw, "AlternativeTitle", alternativeTitle, title_check, language); title_check = writeMultipleTitleElement(xmlw, "AlternativeTitle", datasetVersionDTO, "citation", title_check, language); + writeEndTag(xmlw, title_check); + } + private static boolean writeMultipleTitleElement(XMLStreamWriter xmlw, String titleType, DatasetVersionDTO datasetVersionDTO, String metadataBlockName, boolean title_check, String language) throws XMLStreamException { + MetadataBlockDTO block = datasetVersionDTO.getMetadataBlocks().get(metadataBlockName); + if (block != null) { + logger.info("Block is not empty"); + List fieldsBlock = block.getFields(); + if (fieldsBlock != null) { + for (FieldDTO fieldDTO : fieldsBlock) { + logger.info(titleType + " " + fieldDTO.getTypeName()); + if (titleType.toLowerCase().equals(fieldDTO.getTypeName().toLowerCase())) { + logger.info("Found Alt title"); + List fields = fieldDTO.getMultiplePrimitive(); + for (String value : fields) { + if (!writeTitleElement(xmlw, titleType, value, title_check, language)) + title_check = false; + } + break; + } + } + } + } - writeEndTag(xmlw, title_check); + return title_check; } /** @@ -405,33 +382,6 @@ private static boolean writeTitleElement(XMLStreamWriter xmlw, String titleType, } return title_check; } - - private static boolean writeMultipleTitleElement(XMLStreamWriter xmlw, String titleType, DatasetVersionDTO datasetVersionDTO, String metadataBlockName, boolean title_check, String language) throws XMLStreamException { - MetadataBlockDTO block = datasetVersionDTO.getMetadataBlocks().get(metadataBlockName); - if (block != null) { - logger.info("Block is not empty"); - List 
fieldsBlock = block.getFields(); - if (fieldsBlock != null) { - for (FieldDTO fieldDTO : fieldsBlock) { - logger.info(titleType + " " + fieldDTO.getTypeName()); - if (titleType.toLowerCase().equals(fieldDTO.getTypeName().toLowerCase())) { - logger.info("Found Alt title"); - List fields = fieldDTO.getMultiplePrimitive(); - for (String value : fields) { - if (!writeTitleElement(xmlw, titleType, value, title_check, language)) - title_check = false; - } - break; - } - } - } - } - - return title_check; - } - - - /** * 5, PublicationYear (M) @@ -737,61 +687,23 @@ public static void writeContributorElement(XMLStreamWriter xmlw, String contribu boolean nameType_check = false; Map contributor_map = new HashMap(); - // ToDo - the algorithm to determine if this is a Person or Organization here - // has been abstracted into a separate - // edu.harvard.iq.dataverse.util.PersonOrOrgUtil class that could be used here - // to avoid duplication/variants of the algorithm + JsonObject contributorObj = PersonOrOrgUtil.getPersonOrOrganization(contributorName, + ("ContactPerson".equals(contributorType) && !isValidEmailAddress(contributorName)), false); - contributorName = Cleanup.normalize(contributorName); - // Datacite algorithm, https://github.com/IQSS/dataverse/issues/2243#issuecomment-358615313 - if (contributorName.contains(",")) { - String givenName = FirstNames.getInstance().getFirstName(contributorName); - boolean isOrganization = Organizations.getInstance().isOrganization(contributorName); - - // contributorName=, - if (givenName != null && !isOrganization) { - // givenName ok + if (contributorObj.getBoolean("isPerson")) { + if(contributorObj.containsKey("givenName")) { contributor_map.put("nameType", "Personal"); - nameType_check = true; - // re: the above toDo - the ("ContactPerson".equals(contributorType) && - // !isValidEmailAddress(contributorName)) clause in the next line could/should - // be sent as the OrgIfTied boolean parameter - } else if (isOrganization || ("ContactPerson".equals(contributorType) && !isValidEmailAddress(contributorName))) { - contributor_map.put("nameType", "Organizational"); - } - writeFullElement(xmlw, null, "contributorName", contributor_map, contributorName, language); - - if ((nameType_check) && (!contributorName.replaceFirst(",", "").contains(","))) { - // contributorName=, - String[] fullName = contributorName.split(", "); - givenName = fullName[1]; - String familyName = fullName[0]; - - writeFullElement(xmlw, null, "givenName", null, givenName, language); - writeFullElement(xmlw, null, "familyName", null, familyName, language); } } else { - String givenName = FirstNames.getInstance().getFirstName(contributorName); - boolean isOrganization = Organizations.getInstance().isOrganization(contributorName); - - if (givenName != null && !isOrganization) { - contributor_map.put("nameType", "Personal"); - writeFullElement(xmlw, null, "contributorName", contributor_map, contributorName, language); - - String familyName = ""; - if (givenName.length() + 1 < contributorName.length()) { - familyName = contributorName.substring(givenName.length() + 1); - } + contributor_map.put("nameType", "Organizational"); + } + writeFullElement(xmlw, null, "contributorName", contributor_map, contributorName, language); - writeFullElement(xmlw, null, "givenName", null, givenName, language); - writeFullElement(xmlw, null, "familyName", null, familyName, language); - } else { - // default - if (isOrganization || ("ContactPerson".equals(contributorType) && !isValidEmailAddress(contributorName))) { 
- contributor_map.put("nameType", "Organizational"); - } - writeFullElement(xmlw, null, "contributorName", contributor_map, contributorName, language); - } + if (contributorObj.containsKey("givenName")) { + writeFullElement(xmlw, null, "givenName", null, contributorObj.getString("givenName"), language); + } + if (contributorObj.containsKey("familyName")) { + writeFullElement(xmlw, null, "familyName", null, contributorObj.getString("familyName"), language); } if (StringUtils.isNotBlank(contributorAffiliation)) { @@ -1291,26 +1203,17 @@ public static void writeDescriptionsElement(XMLStreamWriter xmlw, DatasetVersion if (DatasetFieldConstant.series.equals(fieldDTO.getTypeName())) { // String seriesName = null; String seriesInformation = null; - - Set fieldDTOs = fieldDTO.getSingleCompound(); - for (Iterator iterator = fieldDTOs.iterator(); iterator.hasNext();) { - FieldDTO next = iterator.next(); - /*if (DatasetFieldConstant.seriesName.equals(next.getTypeName())) { - seriesName = next.getSinglePrimitive(); - }*/ - if (DatasetFieldConstant.seriesInformation.equals(next.getTypeName())) { - seriesInformation = next.getSinglePrimitive(); + for (HashSet fieldDTOs : fieldDTO.getMultipleCompound()) { + for (Iterator iterator = fieldDTOs.iterator(); iterator.hasNext();) { + FieldDTO next = iterator.next(); + if (DatasetFieldConstant.seriesInformation.equals(next.getTypeName())) { + seriesInformation = next.getSinglePrimitive(); + } + } + if (StringUtils.isNotBlank(seriesInformation)) { + description_check = writeOpenTag(xmlw, "descriptions", description_check); + writeDescriptionElement(xmlw, "SeriesInformation", seriesInformation, language); } - } - - /*if (StringUtils.isNotBlank(seriesName)){ - contributor_check = writeOpenTag(xmlw, "descriptions", description_check); - - writeDescriptionElement(xmlw, "SeriesInformation", seriesName); - }*/ - if (StringUtils.isNotBlank(seriesInformation)) { - description_check = writeOpenTag(xmlw, "descriptions", description_check); - writeDescriptionElement(xmlw, "SeriesInformation", seriesInformation, language); } } } From 3bb7cbfae2072f7bb4f5b11567cd0b02c0b4bb02 Mon Sep 17 00:00:00 2001 From: lubitchv Date: Thu, 11 May 2023 17:27:05 -0400 Subject: [PATCH 0146/1525] add prodPlac --- .../iq/dataverse/api/imports/ImportDDIServiceBean.java | 8 +++++--- .../harvard/iq/dataverse/export/ddi/DdiExportUtil.java | 6 +++++- 2 files changed, 10 insertions(+), 4 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/api/imports/ImportDDIServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/api/imports/ImportDDIServiceBean.java index bafd7267acb..ae98e7e76ea 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/imports/ImportDDIServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/imports/ImportDDIServiceBean.java @@ -1339,6 +1339,7 @@ private void processProdStmt(XMLStreamReader xmlr, MetadataBlockDTO citation) th List> producers = new ArrayList<>(); List> grants = new ArrayList<>(); List> software = new ArrayList<>(); + List prodPlac = new ArrayList<>(); for (int event = xmlr.next(); event != XMLStreamConstants.END_DOCUMENT; event = xmlr.next()) { if (event == XMLStreamConstants.START_ELEMENT) { @@ -1354,9 +1355,7 @@ private void processProdStmt(XMLStreamReader xmlr, MetadataBlockDTO citation) th } else if (xmlr.getLocalName().equals("prodDate")) { citation.getFields().add(FieldDTO.createPrimitiveFieldDTO("productionDate", parseDate(xmlr, "prodDate"))); } else if (xmlr.getLocalName().equals("prodPlac")) { - List prodPlac = new ArrayList<>(); - 
prodPlac.add(parseText(xmlr, "prodPlac")); - citation.getFields().add(FieldDTO.createMultiplePrimitiveFieldDTO(DatasetFieldConstant.productionPlace, prodPlac)); + prodPlac.add(parseText(xmlr)); } else if (xmlr.getLocalName().equals("software")) { HashSet set = new HashSet<>(); addToSet(set,"softwareVersion", xmlr.getAttributeValue(null, "version")); @@ -1389,6 +1388,9 @@ private void processProdStmt(XMLStreamReader xmlr, MetadataBlockDTO citation) th if (producers.size()>0) { citation.getFields().add(FieldDTO.createMultipleCompoundFieldDTO("producer", producers)); } + if (prodPlac.size() > 0) { + citation.getFields().add(FieldDTO.createMultiplePrimitiveFieldDTO(DatasetFieldConstant.productionPlace, prodPlac)); + } return; } } diff --git a/src/main/java/edu/harvard/iq/dataverse/export/ddi/DdiExportUtil.java b/src/main/java/edu/harvard/iq/dataverse/export/ddi/DdiExportUtil.java index a647c2a6f2a..cd9311ec518 100644 --- a/src/main/java/edu/harvard/iq/dataverse/export/ddi/DdiExportUtil.java +++ b/src/main/java/edu/harvard/iq/dataverse/export/ddi/DdiExportUtil.java @@ -1006,7 +1006,11 @@ private static void writeProducersElement(XMLStreamWriter xmlw, DatasetVersionDT // productionPlace was made multiple as of 5.14: // (a quick backward compatibility check was added to dto2PrimitiveList(), // see the method for details) - writeFullElementList(xmlw, "prodPlac", dto2PrimitiveList(version, DatasetFieldConstant.productionPlace)); + + FieldDTO prodPlac = dto2FieldDTO( version, DatasetFieldConstant.productionPlace, "citation" ); + if (prodPlac != null) { + writeMultipleElement(xmlw, "prodPlac", prodPlac, null); + } writeSoftwareElement(xmlw, version); writeGrantElement(xmlw, version); From aeac121cd6740002c06488aada95d536bd74c790 Mon Sep 17 00:00:00 2001 From: Eryk Kulikowski Date: Fri, 12 May 2023 11:19:43 +0200 Subject: [PATCH 0147/1525] more readable checkUpdateDatasetVersionLock implementation --- .../iq/dataverse/PermissionServiceBean.java | 15 ++++----------- 1 file changed, 4 insertions(+), 11 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/PermissionServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/PermissionServiceBean.java index 5d72bd225d2..8c0a0bf90b0 100644 --- a/src/main/java/edu/harvard/iq/dataverse/PermissionServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/PermissionServiceBean.java @@ -751,17 +751,10 @@ else if (dataset.isLockedFor(DatasetLock.Reason.InReview)) { } } - public void checkUpdateDatasetVersionLock(Dataset dataset, DataverseRequest dataverseRequest, Command command) throws IllegalCommandException { - boolean locked = false; - if (dataset.isLocked()) { - for (final DatasetLock lock: dataset.getLocks()) { - if (lock.getReason() != DatasetLock.Reason.Ingest) { - locked = true; - break; - } - } - } - if (locked) { + public void checkUpdateDatasetVersionLock(Dataset dataset, DataverseRequest dataverseRequest, Command command) throws IllegalCommandException { + boolean hasAtLeastOneLockThatIsNotAnIngestLock = dataset.isLocked() && dataset.getLocks().stream() + .anyMatch(lock -> !DatasetLock.Reason.Ingest.equals(lock.getReason())); + if (hasAtLeastOneLockThatIsNotAnIngestLock) { checkEditDatasetLock(dataset, dataverseRequest, command); } } From 2ef9297958014c326e2c111d64ac6578229d59a6 Mon Sep 17 00:00:00 2001 From: GPortas Date: Fri, 12 May 2023 17:05:12 +0100 Subject: [PATCH 0148/1525] Added: API endpoint for getting dataset summary field names (pending IT) --- .../harvard/iq/dataverse/api/Datasets.java | 12 +++++ 
.../iq/dataverse/dataset/DatasetUtil.java | 46 ++++++++++--------- .../iq/dataverse/dataset/DatasetUtilTest.java | 20 +++++++- 3 files changed, 54 insertions(+), 24 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java index d40bc153141..32515ac28cc 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java @@ -3849,4 +3849,16 @@ public Response getExternalToolDVParams(@Context ContainerRequestContext crc, return wr.getResponse(); } } + + @GET + @Path("summaryFieldNames") + public Response getDatasetSummaryFieldNames() { + String customFieldNames = settingsService.getValueForKey(SettingsServiceBean.Key.CustomDatasetSummaryFields); + String[] fieldNames = DatasetUtil.getDatasetSummaryFieldNames(customFieldNames); + JsonArrayBuilder fieldNamesArrayBuilder = Json.createArrayBuilder(); + for (String fieldName : fieldNames) { + fieldNamesArrayBuilder.add(fieldName); + } + return ok(fieldNamesArrayBuilder); + } } diff --git a/src/main/java/edu/harvard/iq/dataverse/dataset/DatasetUtil.java b/src/main/java/edu/harvard/iq/dataverse/dataset/DatasetUtil.java index 9e805a304a5..a75775810d9 100644 --- a/src/main/java/edu/harvard/iq/dataverse/dataset/DatasetUtil.java +++ b/src/main/java/edu/harvard/iq/dataverse/dataset/DatasetUtil.java @@ -44,6 +44,7 @@ public class DatasetUtil { private static final Logger logger = Logger.getLogger(DatasetUtil.class.getCanonicalName()); + public static final String datasetDefaultSummaryFieldNames = "dsDescription,subject,keyword,publication,notesText"; public static String datasetLogoFilenameFinal = "dataset_logo_original"; public static String datasetLogoThumbnail = "dataset_logo"; public static String thumbExtension = ".thumb"; @@ -429,32 +430,33 @@ public static boolean isDatasetLogoPresent(Dataset dataset, int size) { return false; } - public static List getDatasetSummaryFields(DatasetVersion datasetVersion, String customFields) { - - List datasetFields = new ArrayList<>(); - - //if customFields are empty, go with default fields. - if(customFields==null || customFields.isEmpty()){ - customFields="dsDescription,subject,keyword,publication,notesText"; - } - - String[] customFieldList= customFields.split(","); - Map DatasetFieldsSet=new HashMap<>(); - + public static List getDatasetSummaryFields(DatasetVersion datasetVersion, String customFieldNames) { + Map datasetFieldsSet = new HashMap<>(); for (DatasetField dsf : datasetVersion.getFlatDatasetFields()) { - DatasetFieldsSet.put(dsf.getDatasetFieldType().getName(),dsf); + datasetFieldsSet.put(dsf.getDatasetFieldType().getName(), dsf); + } + String[] summaryFieldNames = getDatasetSummaryFieldNames(customFieldNames); + List datasetSummaryFields = new ArrayList<>(); + for (String summaryFieldName : summaryFieldNames) { + DatasetField df = datasetFieldsSet.get(summaryFieldName); + if (df != null) { + datasetSummaryFields.add(df); + } } - - for(String cfl : customFieldList) - { - DatasetField df = DatasetFieldsSet.get(cfl); - if(df!=null) - datasetFields.add(df); + return datasetSummaryFields; + } + + public static String[] getDatasetSummaryFieldNames(String customFieldNames) { + String summaryFieldNames; + // If the custom fields are empty, go with the default fields. 
+ if(customFieldNames == null || customFieldNames.isEmpty()){ + summaryFieldNames = datasetDefaultSummaryFieldNames; + } else { + summaryFieldNames = customFieldNames; } - - return datasetFields; + return summaryFieldNames.split(","); } - + public static boolean isRsyncAppropriateStorageDriver(Dataset dataset){ // ToDo - rsync was written before multiple store support and currently is hardcoded to use the DataAccess.S3 store. // When those restrictions are lifted/rsync can be configured per store, this test should check that setting diff --git a/src/test/java/edu/harvard/iq/dataverse/dataset/DatasetUtilTest.java b/src/test/java/edu/harvard/iq/dataverse/dataset/DatasetUtilTest.java index 93eabfbf8af..46bce999c60 100644 --- a/src/test/java/edu/harvard/iq/dataverse/dataset/DatasetUtilTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/dataset/DatasetUtilTest.java @@ -1,7 +1,6 @@ package edu.harvard.iq.dataverse.dataset; import edu.harvard.iq.dataverse.DataFile; -import edu.harvard.iq.dataverse.DataFileCategory; import edu.harvard.iq.dataverse.Dataset; import edu.harvard.iq.dataverse.DatasetField; import edu.harvard.iq.dataverse.DatasetFieldType; @@ -10,7 +9,6 @@ import edu.harvard.iq.dataverse.DatasetFieldType.FieldType; import edu.harvard.iq.dataverse.dataaccess.ImageThumbConverter; import edu.harvard.iq.dataverse.mocks.MocksFactory; -import java.io.InputStream; import java.util.ArrayList; import java.util.List; import org.junit.Test; @@ -65,6 +63,7 @@ public void testGetThumbnailRestricted() { DatasetThumbnail result = DatasetUtil.getThumbnail(dataset, ImageThumbConverter.DEFAULT_CARDIMAGE_SIZE); assertNull(result); } + /** * Test of deleteDatasetLogo method, of class DatasetUtil. */ @@ -160,4 +159,21 @@ public void testGetDatasetSummaryField_withSelectionWithoutMatches() { assertEquals(0, DatasetUtil.getDatasetSummaryFields(version, "object").size()); } + + @Test + public void testGetDatasetSummaryFieldNames_emptyCustomFields() { + String[] actual = DatasetUtil.getDatasetSummaryFieldNames(null); + String[] expected = DatasetUtil.datasetDefaultSummaryFieldNames.split(","); + + assertArrayEquals(expected, actual); + } + + @Test + public void testGetDatasetSummaryFieldNames_notEmptyCustomFields() { + String testCustomFields = "test1,test2"; + String[] actual = DatasetUtil.getDatasetSummaryFieldNames(testCustomFields); + String[] expected = testCustomFields.split(","); + + assertArrayEquals(expected, actual); + } } From cd47f9389e881866a45792b5d9cf9b286c5d7fa7 Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Mon, 15 May 2023 09:30:23 +0200 Subject: [PATCH 0149/1525] chore(deps): update Nimbus SDK to 10.9.1 --- pom.xml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pom.xml b/pom.xml index 49443f62453..8764e4f493d 100644 --- a/pom.xml +++ b/pom.xml @@ -385,7 +385,7 @@ com.nimbusds oauth2-oidc-sdk - 10.7.1 + 10.9.1 From 94598bd66de2ee5a535cca6aab63b020ad95f65b Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Mon, 15 May 2023 10:04:39 +0200 Subject: [PATCH 0150/1525] feat(auth): pass OAuth2/OIDC state parameter to provider When the client is returning from the provider to us, carrying along the authorization code we need to retrieve user details, we also receive again the state. The state was generated and sent by us, and will not be altered by the provider, which makes it perfect to identify the original request we built before sending the client to the provider. Passing this state to the provider enables the provider to reuse this information. 
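
To sketch the idea (illustration only — the cache and the Nimbus CodeVerifier
type below mirror the PKCE commit later in this series, while the class and
method names here are made up for the example):

    import java.util.Map;
    import java.util.concurrent.ConcurrentHashMap;
    import com.nimbusds.oauth2.sdk.pkce.CodeVerifier;

    class StateKeyedSecretsSketch {
        // The state is unique per authentication attempt, so it can key any per-request secret.
        private final Map<String, CodeVerifier> cache = new ConcurrentHashMap<>();

        // Called while building the authorization URL, before redirecting the client.
        void remember(String state, CodeVerifier verifier) {
            cache.put(state, verifier);
        }

        // Called when the client comes back with ?code=...&state=...
        CodeVerifier recall(String state) {
            // One-shot removal: a state (and its secret) must not be replayable.
            return cache.remove(state);
        }
    }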
This is crucial to enable PKCE support, as we need to remember which secret code we sent to the provider - otherwise we will not be able to verify the authz code. Tests have been adapted. --- .../oauth2/AbstractOAuth2AuthenticationProvider.java | 3 ++- .../providers/oauth2/OAuth2LoginBackingBean.java | 2 +- .../providers/oauth2/OAuth2LoginBackingBeanTest.java | 11 ++++++++--- 3 files changed, 11 insertions(+), 5 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/AbstractOAuth2AuthenticationProvider.java b/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/AbstractOAuth2AuthenticationProvider.java index 01139cd2e27..373a295487d 100644 --- a/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/AbstractOAuth2AuthenticationProvider.java +++ b/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/AbstractOAuth2AuthenticationProvider.java @@ -139,6 +139,7 @@ public OAuth20Service getService(String callbackUrl) { * Receive user data from OAuth2 provider after authn/z has been successfull. (Callback view uses this) * Request a token and access the resource, parse output and return user details. * @param code The authz code sent from the provider + * @param state The state which was communicated between us and the provider, identifying the exact request * @param redirectUrl The redirect URL (some providers require this when fetching the access token, e. g. Google) * @return A user record containing all user details accessible for us * @throws IOException Thrown when communication with the provider fails @@ -146,7 +147,7 @@ public OAuth20Service getService(String callbackUrl) { * @throws InterruptedException Thrown when the requests thread is failing * @throws ExecutionException Thrown when the requests thread is failing */ - public OAuth2UserRecord getUserRecord(String code, String redirectUrl) + public OAuth2UserRecord getUserRecord(String code, String state, String redirectUrl) throws IOException, OAuth2Exception, InterruptedException, ExecutionException { OAuth20Service service = getService(redirectUrl); diff --git a/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/OAuth2LoginBackingBean.java b/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/OAuth2LoginBackingBean.java index c5be41a014a..7b52f2e9b16 100644 --- a/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/OAuth2LoginBackingBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/OAuth2LoginBackingBean.java @@ -100,7 +100,7 @@ public void exchangeCodeForToken() throws IOException { if (oIdp.isPresent() && code.isPresent()) { AbstractOAuth2AuthenticationProvider idp = oIdp.get(); - oauthUser = idp.getUserRecord(code.get(), systemConfig.getOAuth2CallbackUrl()); + oauthUser = idp.getUserRecord(code.get(), req.getParameter("state"), systemConfig.getOAuth2CallbackUrl()); // Throw an error if this authentication method is disabled: // (it's not clear if it's possible at all, for somebody to get here with diff --git a/src/test/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/OAuth2LoginBackingBeanTest.java b/src/test/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/OAuth2LoginBackingBeanTest.java index 80249cc89e8..0c54c050d79 100644 --- a/src/test/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/OAuth2LoginBackingBeanTest.java +++ 
b/src/test/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/OAuth2LoginBackingBeanTest.java @@ -105,6 +105,7 @@ class ecft { @Mock DataverseSession session; @Mock OAuth2TokenDataServiceBean oauth2Tokens; Optional redirect = Optional.of("/hellotest"); + String state; @BeforeEach void setUp() throws IOException { @@ -118,7 +119,11 @@ void setUp() throws IOException { when(externalContextMock.getRequest()).thenReturn(requestMock); lenient().when(externalContextMock.getFlash()).thenReturn(flashMock); lenient().when(requestMock.getReader()).thenReturn(reader); - doReturn(loginBackingBean.createState(testIdp, this.redirect)).when(requestMock).getParameter("state"); + + // Save the state as we need it for injection (necessary because of PKCE support) + state = loginBackingBean.createState(testIdp, this.redirect); + doReturn(state).when(requestMock).getParameter("state"); + // travel in time at least 10 milliseconds (remote calls & redirects are much likely longer) // (if not doing this tests become flaky on fast machinas) loginBackingBean.clock = Clock.offset(constantClock, Duration.ofMillis(10)); @@ -140,7 +145,7 @@ void newUser() throws Exception { // fake the code received from the provider when(requestMock.getParameter("code")).thenReturn(code); // let's deep-fake the result of getUserRecord() - doReturn(userRecord).when(testIdp).getUserRecord(code, null); + doReturn(userRecord).when(testIdp).getUserRecord(code, state, null); // WHEN (& then) // capture the redirect target from the faces context @@ -168,7 +173,7 @@ void existingUser() throws Exception { // fake the code received from the provider when(requestMock.getParameter("code")).thenReturn(code); // let's deep-fake the result of getUserRecord() - doReturn(userRecord).when(testIdp).getUserRecord(code, null); + doReturn(userRecord).when(testIdp).getUserRecord(code, state, null); doReturn(tokenData).when(userRecord).getTokenData(); // also fake the result of the lookup in the auth service doReturn(userIdentifier).when(userRecord).getUserRecordIdentifier(); From 5fbee2e067722e7ff649dd30ae3e8afa90851958 Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Mon, 15 May 2023 10:05:03 +0200 Subject: [PATCH 0151/1525] feat(auth): add OIDC PKCE settings to JvmSettings --- .../java/edu/harvard/iq/dataverse/settings/JvmSettings.java | 3 +++ 1 file changed, 3 insertions(+) diff --git a/src/main/java/edu/harvard/iq/dataverse/settings/JvmSettings.java b/src/main/java/edu/harvard/iq/dataverse/settings/JvmSettings.java index dc9267805e6..ba90d895ae9 100644 --- a/src/main/java/edu/harvard/iq/dataverse/settings/JvmSettings.java +++ b/src/main/java/edu/harvard/iq/dataverse/settings/JvmSettings.java @@ -121,6 +121,9 @@ public enum JvmSettings { OIDC_AUTH_SERVER_URL(SCOPE_OIDC, "auth-server-url"), OIDC_CLIENT_ID(SCOPE_OIDC, "client-id"), OIDC_CLIENT_SECRET(SCOPE_OIDC, "client-secret"), + SCOPE_OIDC_PKCE(SCOPE_OIDC, "pkce"), + OIDC_PKCE_ENABLED(SCOPE_OIDC_PKCE, "enabled"), + OIDC_PKCE_METHOD(SCOPE_OIDC_PKCE, "method"), ; From c0d21cc9f935b3dc43653baa3d132681ff94c1c0 Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Mon, 15 May 2023 10:07:35 +0200 Subject: [PATCH 0152/1525] feat(auth): enable PKCE usage in OIDC provider - Adding PKCE parameters to constructor - Adding a hashmap to cache the code verifiers mapped by the unique state we generate - Enabling the actual workflow of PKCE --- .../oauth2/oidc/OIDCAuthProvider.java | 38 ++++++++++++++++--- 1 file changed, 33 insertions(+), 5 deletions(-) diff --git 
a/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/oidc/OIDCAuthProvider.java b/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/oidc/OIDCAuthProvider.java
index 4b6c575cfaf..91d552618ef 100644
--- a/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/oidc/OIDCAuthProvider.java
+++ b/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/oidc/OIDCAuthProvider.java
@@ -18,6 +18,8 @@ import com.nimbusds.oauth2.sdk.id.ClientID;
 import com.nimbusds.oauth2.sdk.id.Issuer;
 import com.nimbusds.oauth2.sdk.id.State;
+import com.nimbusds.oauth2.sdk.pkce.CodeChallengeMethod;
+import com.nimbusds.oauth2.sdk.pkce.CodeVerifier;
 import com.nimbusds.oauth2.sdk.token.BearerAccessToken;
 import com.nimbusds.openid.connect.sdk.AuthenticationRequest;
 import com.nimbusds.openid.connect.sdk.Nonce;
@@ -39,7 +41,9 @@
 import java.net.URI;
 import java.util.Arrays;
 import java.util.List;
+import java.util.Map;
 import java.util.Optional;
+import java.util.concurrent.ConcurrentHashMap;
 import java.util.concurrent.ExecutionException;
 import java.util.logging.Logger;
@@ -57,12 +61,25 @@ public class OIDCAuthProvider extends AbstractOAuth2AuthenticationProvider {
     final Issuer issuer;
     final ClientAuthentication clientAuth;
     final OIDCProviderMetadata idpMetadata;
+    final boolean pkceEnabled;
+    final CodeChallengeMethod pkceMethod;
 
-    public OIDCAuthProvider(String aClientId, String aClientSecret, String issuerEndpointURL) throws AuthorizationSetupException {
+    /**
+     * Using PKCE, we create and send a special {@link CodeVerifier}. This contains a secret
+     * we need again when verifying the response by the provider, thus the cache.
+     */
+    private final Map<String, CodeVerifier> verifierCache = new ConcurrentHashMap<>();
+
+    public OIDCAuthProvider(String aClientId, String aClientSecret, String issuerEndpointURL,
+                            boolean pkceEnabled, String pkceMethod) throws AuthorizationSetupException {
         this.clientSecret = aClientSecret; // needed for state creation
         this.clientAuth = new ClientSecretBasic(new ClientID(aClientId), new Secret(aClientSecret));
         this.issuer = new Issuer(issuerEndpointURL);
+
         this.idpMetadata = getMetadata();
+
+        this.pkceEnabled = pkceEnabled;
+        this.pkceMethod = CodeChallengeMethod.parse(pkceMethod);
     }
 
     /**
@@ -147,6 +164,7 @@ public String buildAuthzUrl(String state, String callbackUrl) {
         State stateObject = new State(state);
         URI callback = URI.create(callbackUrl);
         Nonce nonce = new Nonce();
+        CodeVerifier pkceVerifier = pkceEnabled ? new CodeVerifier() : null;
 
         AuthenticationRequest req = new AuthenticationRequest.Builder(new ResponseType("code"),
                                                                       Scope.parse(this.scope),
@@ -154,9 +172,15 @@
                                                                       callback)
             .endpointURI(idpMetadata.getAuthorizationEndpointURI())
             .state(stateObject)
+            // Called method is nullsafe - will disable sending a PKCE challenge in case the verifier is not present
+            .codeChallenge(pkceVerifier, pkceMethod)
             .nonce(nonce)
             .build();
 
+        // Cache the PKCE verifier, as we need the secret in it for verification later again, after the client sends us
+        // the auth code! We use the state to cache the verifier, as the state is unique per authentication event.
+ this.verifierCache.put(state, pkceVerifier); + return req.toURI().toString(); } @@ -172,10 +196,14 @@ public String buildAuthzUrl(String state, String callbackUrl) { * @throws ExecutionException Thrown when the requests thread is failing */ @Override - public OAuth2UserRecord getUserRecord(String code, String redirectUrl) - throws IOException, OAuth2Exception, InterruptedException, ExecutionException { - // Create grant object - AuthorizationGrant codeGrant = new AuthorizationCodeGrant(new AuthorizationCode(code), URI.create(redirectUrl)); + public OAuth2UserRecord getUserRecord(String code, String state, String redirectUrl) throws IOException, OAuth2Exception { + // Retrieve the verifier from the cache and clear from the cache. If not found, will be null. + // Will be sent to token endpoint for verification, so if required but missing, will lead to exception. + CodeVerifier verifier = verifierCache.remove(state); + + // Create grant object - again, this is null-safe for the verifier + AuthorizationGrant codeGrant = new AuthorizationCodeGrant( + new AuthorizationCode(code), URI.create(redirectUrl), verifier); // Get Access Token first Optional accessToken = getAccessToken(codeGrant); From ef4d192156bd7efd6e83226c57dd7deea545a6e2 Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Mon, 15 May 2023 10:08:37 +0200 Subject: [PATCH 0153/1525] feat(auth): extend OIDC provider factory to understand PKCE parameters To enable backward compatibility, default to disabled and method S256. --- .../oauth2/oidc/OIDCAuthenticationProviderFactory.java | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/oidc/OIDCAuthenticationProviderFactory.java b/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/oidc/OIDCAuthenticationProviderFactory.java index 89cf1cb986d..3f8c18d0567 100644 --- a/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/oidc/OIDCAuthenticationProviderFactory.java +++ b/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/oidc/OIDCAuthenticationProviderFactory.java @@ -41,7 +41,9 @@ public AuthenticationProvider buildProvider( AuthenticationProviderRow aRow ) th OIDCAuthProvider oidc = new OIDCAuthProvider( factoryData.get("clientId"), factoryData.get("clientSecret"), - factoryData.get("issuer") + factoryData.get("issuer"), + Boolean.parseBoolean(factoryData.getOrDefault("pkceEnabled", "false")), + factoryData.getOrDefault("pkceMethod", "S256") ); oidc.setId(aRow.getId()); @@ -60,7 +62,9 @@ public static AuthenticationProvider buildFromSettings() throws AuthorizationSet OIDCAuthProvider oidc = new OIDCAuthProvider( JvmSettings.OIDC_CLIENT_ID.lookup(), JvmSettings.OIDC_CLIENT_SECRET.lookup(), - JvmSettings.OIDC_AUTH_SERVER_URL.lookup() + JvmSettings.OIDC_AUTH_SERVER_URL.lookup(), + JvmSettings.OIDC_PKCE_ENABLED.lookupOptional(Boolean.class).orElse(false), + JvmSettings.OIDC_PKCE_METHOD.lookupOptional().orElse("S256") ); oidc.setId("oidc-mpconfig"); From 37bcc3a69930879810c7a7eb87f465219a00a24d Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Mon, 15 May 2023 10:28:32 +0200 Subject: [PATCH 0154/1525] doc(auth): add OIDC PKCE configuration --- .../source/installation/oidc.rst | 34 ++++++++++++++++++- 1 file changed, 33 insertions(+), 1 deletion(-) diff --git a/doc/sphinx-guides/source/installation/oidc.rst b/doc/sphinx-guides/source/installation/oidc.rst index fbcbd3eb4ad..9848d73b189 100644 --- a/doc/sphinx-guides/source/installation/oidc.rst +++ 
b/doc/sphinx-guides/source/installation/oidc.rst
index fbcbd3eb4ad..9848d73b189 100644
--- a/doc/sphinx-guides/source/installation/oidc.rst
+++ b/doc/sphinx-guides/source/installation/oidc.rst
@@ -69,9 +69,32 @@ After adding a provider, the Log In page will by default show the "builtin" prov
 In contrast to our :doc:`oauth2`, you can use multiple providers by creating distinct configurations enabled by the same technology and without modifying the Dataverse Software code base (standards for the win!).
 
+
+.. _oidc-pkce:
+
+Enabling PKCE Security
+^^^^^^^^^^^^^^^^^^^^^^
+
+Many providers these days support or even require the usage of `PKCE `_ to safeguard against
+some attacks and to enable public clients that cannot keep a secret secure to still use OpenID Connect (or OAuth2).
+
+The Dataverse-built OIDC client can be configured to use PKCE, including which method to use when creating the code challenge.
+See also `this explanation of the flow `_
+for details on how this works.
+
+As we are using the `Nimbus SDK `_ as our client
+library, we support the standard ``PLAIN`` and ``S256`` code challenge methods. The "SHA-256 method" is the default,
+as recommended in `RFC7636 `_. If your provider needs some
+other method (unlikely), please open an issue.
+
+The provisioning sections below include example parameters you may use to configure PKCE.
+
 Provision via REST API
 ^^^^^^^^^^^^^^^^^^^^^^
 
+Note: you may omit the PKCE-related settings from ``factoryData`` below if you don't plan on using PKCE - the default is
+disabled.
+
 Please create a ``my-oidc-provider.json`` file like this, replacing every ``<...>`` with your values:
 
 .. code-block:: json
 
@@ -81,7 +104,7 @@ Please create a ``my-oidc-provider.json`` file like this, replacing every ``<...
     "factoryAlias":"oidc",
     "title":"",
     "subtitle":"",
-    "factoryData":"type: oidc | issuer: | clientId: | clientSecret: ",
+    "factoryData":"type: oidc | issuer: | clientId: | clientSecret: | pkceEnabled: | pkceMethod: ",
     "enabled":true
 }
 
@@ -105,6 +128,7 @@ The following options are available:
 .. list-table::
   :widths: 25 55 10 10
   :header-rows: 1
+  :align: left
 
   * - Option
     - Description
@@ -126,6 +150,14 @@
     - The base URL of the OpenID Connect (OIDC) server as explained above.
     - Y
     - \-
+  * - ``dataverse.auth.oidc.pkce.enabled``
+    - Set to ``true`` to enable :ref:`PKCE <oidc-pkce>` in the auth flow.
+    - N
+    - ``false``
+  * - ``dataverse.auth.oidc.pkce.method``
+    - Set the code challenge method. The default follows current best practice.
+    - N
+    - ``S256``
   * - ``dataverse.auth.oidc.title``
     - The UI visible name for this provider in login options.
- N From 4a622a095ffc8a2bd471c7155d3d18e5abd81ca4 Mon Sep 17 00:00:00 2001 From: GPortas Date: Mon, 15 May 2023 10:04:15 +0100 Subject: [PATCH 0155/1525] Added: getDatasetSummaryFieldNames IT --- .../java/edu/harvard/iq/dataverse/api/DatasetsIT.java | 11 +++++++++++ .../java/edu/harvard/iq/dataverse/api/UtilIT.java | 7 +++++++ 2 files changed, 18 insertions(+) diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java index 6988fc333a3..5ece0e0d018 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java @@ -77,6 +77,7 @@ import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; +import static org.junit.Assert.assertFalse; public class DatasetsIT { @@ -3051,4 +3052,14 @@ public void testArchivalStatusAPI() throws IOException { } + @Test + public void testGetDatasetSummaryFieldNames() { + Response summaryFieldNamesResponse = UtilIT.getDatasetSummaryFieldNames(); + summaryFieldNamesResponse.then().assertThat().statusCode(OK.getStatusCode()); + JsonArray actualSummaryFields; + try (StringReader rdr = new StringReader(summaryFieldNamesResponse.body().asString())) { + actualSummaryFields = Json.createReader(rdr).readObject().getJsonArray("data"); + } + assertFalse(actualSummaryFields.isEmpty()); + } } diff --git a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java index 1937905b56f..7c45155a672 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java @@ -3190,4 +3190,11 @@ static Response logout() { .post("/api/logout"); return response; } + + static Response getDatasetSummaryFieldNames() { + Response response = given() + .contentType("application/json") + .get("/api/datasets/summaryFieldNames"); + return response; + } } From 15a47b1cb2f6afb039646462c1d28cf5c852847a Mon Sep 17 00:00:00 2001 From: GPortas Date: Mon, 15 May 2023 10:05:49 +0100 Subject: [PATCH 0156/1525] Added: missing LogoutIT to integration-tests.txt --- tests/integration-tests.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/integration-tests.txt b/tests/integration-tests.txt index 9c955416361..18911b3164a 100644 --- a/tests/integration-tests.txt +++ b/tests/integration-tests.txt @@ -1 +1 @@ -DataversesIT,DatasetsIT,SwordIT,AdminIT,BuiltinUsersIT,UsersIT,UtilIT,ConfirmEmailIT,FileMetadataIT,FilesIT,SearchIT,InReviewWorkflowIT,HarvestingServerIT,HarvestingClientsIT,MoveIT,MakeDataCountApiIT,FileTypeDetectionIT,EditDDIIT,ExternalToolsIT,AccessIT,DuplicateFilesIT,DownloadFilesIT,LinkIT,DeleteUsersIT,DeactivateUsersIT,AuxiliaryFilesIT,InvalidCharactersIT,LicensesIT,NotificationsIT,BagIT,MetadataBlocksIT,NetcdfIT,SignpostingIT,FitsIT +DataversesIT,DatasetsIT,SwordIT,AdminIT,BuiltinUsersIT,UsersIT,UtilIT,ConfirmEmailIT,FileMetadataIT,FilesIT,SearchIT,InReviewWorkflowIT,HarvestingServerIT,HarvestingClientsIT,MoveIT,MakeDataCountApiIT,FileTypeDetectionIT,EditDDIIT,ExternalToolsIT,AccessIT,DuplicateFilesIT,DownloadFilesIT,LinkIT,DeleteUsersIT,DeactivateUsersIT,AuxiliaryFilesIT,InvalidCharactersIT,LicensesIT,NotificationsIT,BagIT,MetadataBlocksIT,NetcdfIT,SignpostingIT,FitsIT,LogoutIT From 5ed66e6bb481bd94c81965b4032e642a058943e4 Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Mon, 15 May 2023 12:20:20 +0200 Subject: [PATCH 0157/1525] chore(deps): update to 
Testcontainers 1.18.1 --- modules/dataverse-parent/pom.xml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/modules/dataverse-parent/pom.xml b/modules/dataverse-parent/pom.xml index 7f8f73e049a..060fc22b4d2 100644 --- a/modules/dataverse-parent/pom.xml +++ b/modules/dataverse-parent/pom.xml @@ -167,7 +167,7 @@ 5.0.0 - 1.17.6 + 1.18.1 2.10.1 4.13.1 From b9720c868b89e9db191b1425fb31574771bc1cee Mon Sep 17 00:00:00 2001 From: Victoria Lubitch <43550154+lubitchv@users.noreply.github.com> Date: Mon, 15 May 2023 12:35:42 -0400 Subject: [PATCH 0158/1525] Update src/main/java/edu/harvard/iq/dataverse/export/ddi/DdiExportUtil.java Co-authored-by: Philip Durbin --- .../java/edu/harvard/iq/dataverse/export/ddi/DdiExportUtil.java | 1 - 1 file changed, 1 deletion(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/export/ddi/DdiExportUtil.java b/src/main/java/edu/harvard/iq/dataverse/export/ddi/DdiExportUtil.java index cd9311ec518..819a14c6c68 100644 --- a/src/main/java/edu/harvard/iq/dataverse/export/ddi/DdiExportUtil.java +++ b/src/main/java/edu/harvard/iq/dataverse/export/ddi/DdiExportUtil.java @@ -202,7 +202,6 @@ private static void createStdyDscr(XMLStreamWriter xmlw, DatasetDTO datasetDto) writeFullElement(xmlw, "titl", dto2Primitive(version, DatasetFieldConstant.title), datasetDto.getMetadataLanguage()); writeFullElement(xmlw, "subTitl", dto2Primitive(version, DatasetFieldConstant.subTitle)); - //writeFullElement(xmlw, "altTitl", dto2Primitive(version, DatasetFieldConstant.alternativeTitle)); FieldDTO altField = dto2FieldDTO( version, DatasetFieldConstant.alternativeTitle, "citation" ); if (altField != null) { writeMultipleElement(xmlw, "altTitl", altField, datasetDto.getMetadataLanguage()); From 5684140dff737f092f195ffeefddbf5074a409e5 Mon Sep 17 00:00:00 2001 From: Victoria Lubitch <43550154+lubitchv@users.noreply.github.com> Date: Mon, 15 May 2023 12:37:18 -0400 Subject: [PATCH 0159/1525] Update src/main/java/edu/harvard/iq/dataverse/export/openaire/OpenAireExportUtil.java Co-authored-by: Philip Durbin --- .../iq/dataverse/export/openaire/OpenAireExportUtil.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/export/openaire/OpenAireExportUtil.java b/src/main/java/edu/harvard/iq/dataverse/export/openaire/OpenAireExportUtil.java index 334b18f4601..e858dee6d2b 100644 --- a/src/main/java/edu/harvard/iq/dataverse/export/openaire/OpenAireExportUtil.java +++ b/src/main/java/edu/harvard/iq/dataverse/export/openaire/OpenAireExportUtil.java @@ -331,7 +331,7 @@ public static void writeTitlesElement(XMLStreamWriter xmlw, DatasetVersionDTO da private static boolean writeMultipleTitleElement(XMLStreamWriter xmlw, String titleType, DatasetVersionDTO datasetVersionDTO, String metadataBlockName, boolean title_check, String language) throws XMLStreamException { MetadataBlockDTO block = datasetVersionDTO.getMetadataBlocks().get(metadataBlockName); if (block != null) { - logger.info("Block is not empty"); + logger.fine("Block is not empty"); List fieldsBlock = block.getFields(); if (fieldsBlock != null) { for (FieldDTO fieldDTO : fieldsBlock) { From 64f4f1f0af83384b8157f2b13d29d941cb2aac77 Mon Sep 17 00:00:00 2001 From: Victoria Lubitch <43550154+lubitchv@users.noreply.github.com> Date: Mon, 15 May 2023 12:37:31 -0400 Subject: [PATCH 0160/1525] Update src/main/java/edu/harvard/iq/dataverse/export/openaire/OpenAireExportUtil.java Co-authored-by: Philip Durbin --- .../iq/dataverse/export/openaire/OpenAireExportUtil.java | 2 +- 1 file 
changed, 1 insertion(+), 1 deletion(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/export/openaire/OpenAireExportUtil.java b/src/main/java/edu/harvard/iq/dataverse/export/openaire/OpenAireExportUtil.java index e858dee6d2b..146c442526a 100644 --- a/src/main/java/edu/harvard/iq/dataverse/export/openaire/OpenAireExportUtil.java +++ b/src/main/java/edu/harvard/iq/dataverse/export/openaire/OpenAireExportUtil.java @@ -335,7 +335,7 @@ private static boolean writeMultipleTitleElement(XMLStreamWriter xmlw, String ti List fieldsBlock = block.getFields(); if (fieldsBlock != null) { for (FieldDTO fieldDTO : fieldsBlock) { - logger.info(titleType + " " + fieldDTO.getTypeName()); + logger.fine(titleType + " " + fieldDTO.getTypeName()); if (titleType.toLowerCase().equals(fieldDTO.getTypeName().toLowerCase())) { logger.info("Found Alt title"); List fields = fieldDTO.getMultiplePrimitive(); From 37a372a23c92d4eb2c946a50bf00b91319e141de Mon Sep 17 00:00:00 2001 From: Victoria Lubitch <43550154+lubitchv@users.noreply.github.com> Date: Mon, 15 May 2023 12:37:44 -0400 Subject: [PATCH 0161/1525] Update src/main/java/edu/harvard/iq/dataverse/export/openaire/OpenAireExportUtil.java Co-authored-by: Philip Durbin --- .../iq/dataverse/export/openaire/OpenAireExportUtil.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/export/openaire/OpenAireExportUtil.java b/src/main/java/edu/harvard/iq/dataverse/export/openaire/OpenAireExportUtil.java index 146c442526a..037428d0ea1 100644 --- a/src/main/java/edu/harvard/iq/dataverse/export/openaire/OpenAireExportUtil.java +++ b/src/main/java/edu/harvard/iq/dataverse/export/openaire/OpenAireExportUtil.java @@ -337,7 +337,7 @@ private static boolean writeMultipleTitleElement(XMLStreamWriter xmlw, String ti for (FieldDTO fieldDTO : fieldsBlock) { logger.fine(titleType + " " + fieldDTO.getTypeName()); if (titleType.toLowerCase().equals(fieldDTO.getTypeName().toLowerCase())) { - logger.info("Found Alt title"); + logger.fine("Found Alt title"); List fields = fieldDTO.getMultiplePrimitive(); for (String value : fields) { if (!writeTitleElement(xmlw, titleType, value, title_check, language)) From 2ef0e5f0231367b90e962e73a638aab4c84a9ada Mon Sep 17 00:00:00 2001 From: lubitchv Date: Mon, 15 May 2023 12:39:32 -0400 Subject: [PATCH 0162/1525] test --- .../iq/dataverse/export/openaire/OpenAireExportUtil.java | 2 -- .../edu/harvard/iq/dataverse/export/ddi/dataset-finch1.json | 6 ++++++ .../edu/harvard/iq/dataverse/export/ddi/dataset-finch1.xml | 2 ++ 3 files changed, 8 insertions(+), 2 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/export/openaire/OpenAireExportUtil.java b/src/main/java/edu/harvard/iq/dataverse/export/openaire/OpenAireExportUtil.java index 334b18f4601..6dca1ac348a 100644 --- a/src/main/java/edu/harvard/iq/dataverse/export/openaire/OpenAireExportUtil.java +++ b/src/main/java/edu/harvard/iq/dataverse/export/openaire/OpenAireExportUtil.java @@ -322,8 +322,6 @@ public static void writeTitlesElement(XMLStreamWriter xmlw, DatasetVersionDTO da String subtitle = dto2Primitive(datasetVersionDTO, DatasetFieldConstant.subTitle); title_check = writeTitleElement(xmlw, "Subtitle", subtitle, title_check, language); - //String alternativeTitle = dto2Primitive(datasetVersionDTO, DatasetFieldConstant.alternativeTitle); - //title_check = writeTitleElement(xmlw, "AlternativeTitle", alternativeTitle, title_check, language); title_check = writeMultipleTitleElement(xmlw, "AlternativeTitle", datasetVersionDTO, 
"citation", title_check, language); writeEndTag(xmlw, title_check); } diff --git a/src/test/java/edu/harvard/iq/dataverse/export/ddi/dataset-finch1.json b/src/test/java/edu/harvard/iq/dataverse/export/ddi/dataset-finch1.json index 7845f77d33f..9bdc7e45349 100644 --- a/src/test/java/edu/harvard/iq/dataverse/export/ddi/dataset-finch1.json +++ b/src/test/java/edu/harvard/iq/dataverse/export/ddi/dataset-finch1.json @@ -29,6 +29,12 @@ "typeClass": "primitive", "value": "Darwin's Finches" }, + { + "typeName": "alternativeTitle", + "multiple": true, + "typeClass": "primitive", + "value": ["Darwin's Finches Alternative Title1", "Darwin's Finches Alternative Title2"] + }, { "typeName": "author", "multiple": true, diff --git a/src/test/java/edu/harvard/iq/dataverse/export/ddi/dataset-finch1.xml b/src/test/java/edu/harvard/iq/dataverse/export/ddi/dataset-finch1.xml index 5bbfdae09ac..6730c44603a 100644 --- a/src/test/java/edu/harvard/iq/dataverse/export/ddi/dataset-finch1.xml +++ b/src/test/java/edu/harvard/iq/dataverse/export/ddi/dataset-finch1.xml @@ -17,6 +17,8 @@ Darwin's Finches + Darwin's Finches Alternative Title1 + Darwin's Finches Alternative Title2 doi:10.5072/FK2/PCA2E3 From 4b4c9155048f0ee074f6ba9d01a12e02ea4abd00 Mon Sep 17 00:00:00 2001 From: lubitchv Date: Mon, 15 May 2023 12:54:37 -0400 Subject: [PATCH 0163/1525] docs --- doc/release-notes/9428-alternative-title.md | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/doc/release-notes/9428-alternative-title.md b/doc/release-notes/9428-alternative-title.md index d6eaa680612..3bc74f218b5 100644 --- a/doc/release-notes/9428-alternative-title.md +++ b/doc/release-notes/9428-alternative-title.md @@ -3,4 +3,7 @@ Alternative Title is made repeatable. `curl http://localhost:8080/api/admin/datasetfield/load -H "Content-type: text/tab-separated-values" -X POST --upload-file scripts/api/data/metadatablocks/citation.tsv` - One will also need to update solr schema: Change in "alternativeTitle" field multiValued="true" in `/usr/local/solr/solr-8.11.1/server/solr/collection1/conf/schema.xml` -Reload solr schema: `curl "http://localhost:8983/solr/admin/cores?action=RELOAD&core=collection1"` +Reload solr schema: `curl "http://localhost:8983/solr/admin/cores?action=RELOAD&core=collection1"` + +Since Alternative Title is repeatable now, old json apis would not be compatable with a new version since value of alternative title has changed from simple string to an array. +For example, instead "value": "Alternative Title", the value canbe "value": ["Alternative Title1", "Alternative Title2"] From d5761271fb9ed00d1bb0236a7a97779cb4ce1898 Mon Sep 17 00:00:00 2001 From: sirineREKIK Date: Tue, 16 May 2023 09:34:12 +0200 Subject: [PATCH 0164/1525] add MyData API to dataverse documentaion --- doc/sphinx-guides/source/api/native-api.rst | 37 +++++++++++++++++++++ 1 file changed, 37 insertions(+) diff --git a/doc/sphinx-guides/source/api/native-api.rst b/doc/sphinx-guides/source/api/native-api.rst index 6f10b28f55b..09096667650 100644 --- a/doc/sphinx-guides/source/api/native-api.rst +++ b/doc/sphinx-guides/source/api/native-api.rst @@ -4505,6 +4505,43 @@ A curl example using allowing access to a dataset's metadata Please see :ref:`dataverse.api.signature-secret` for the configuration option to add a shared secret, enabling extra security. + +MyData +----- + +The MyData API is used to get a list of just the datasets, dataverses or datafiles an authenticated user can edit. + +MyData API +~~~~~~~~~~~~~~~~~~~~~~~~~~ + +List objects:: + +.. 
+
+   export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx
+   export SERVER_URL=https://demo.dataverse.org
+   export ROLE_IDS=6
+   export DVOBJECT_TYPES=Dataset
+   export PUBLISHED_STATES=Unpublished
+   export PER_PAGE=10
+
+   curl "$SERVER_URL/api/mydata/retrieve?key=$API_TOKEN&role_ids=$ROLE_IDS&dvobject_types=$DVOBJECT_TYPES&published_states=$PUBLISHED_STATES&per_page=$PER_PAGE"
+
+``key`` is the user token, for this API it must not be passed in the header.
+``role_ids`` User roles, several possible values among:
+- ``1`` = Admin
+- ``2`` = File Downloader
+- ``3`` = Dataverse + Dataset Creator
+- ``4`` = Dataverse Creator
+- ``5`` = Dataset Creator
+- ``6`` = Contributor
+- ``7`` = Curator
+- ``8`` = Member
+``dvobject_types`` Type of object, several possible values among: ``DataFile``, ``Dataset``& ``Dataverse``.
+``published_states`` State of the object, several possible values among:``Published``, ``Unpublished``, ``Draft``, ``Deaccessioned`` & ``In+Review``.
+``per_page`` Number of results returned per page.
+
+
 .. _send-feedback:
 
 Send Feedback To Contact(s)

From 17927026dbc9bf0148f68234f2c39ab58526f799 Mon Sep 17 00:00:00 2001
From: sirineREKIK
Date: Tue, 16 May 2023 15:06:20 +0200
Subject: [PATCH 0165/1525] fix sphinx error

---
 doc/sphinx-guides/source/api/native-api.rst | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/doc/sphinx-guides/source/api/native-api.rst b/doc/sphinx-guides/source/api/native-api.rst
index 09096667650..20050219c2f 100644
--- a/doc/sphinx-guides/source/api/native-api.rst
+++ b/doc/sphinx-guides/source/api/native-api.rst
@@ -4507,12 +4507,12 @@ security.
 
 MyData
------
+------
 
 The MyData API is used to get a list of just the datasets, dataverses or datafiles an authenticated user can edit.
 
 MyData API
-~~~~~~~~~~~~~~~~~~~~~~~~~~
+~~~~~~~~~~
 
 List objects::

From ccd295fa3fc43fbf2d6d0ab43e0ac8acaab4eedb Mon Sep 17 00:00:00 2001
From: sirineREKIK
Date: Tue, 16 May 2023 15:25:45 +0200
Subject: [PATCH 0166/1525] fix layout

---
 doc/sphinx-guides/source/api/native-api.rst | 12 ++++++++----
 1 file changed, 8 insertions(+), 4 deletions(-)

diff --git a/doc/sphinx-guides/source/api/native-api.rst b/doc/sphinx-guides/source/api/native-api.rst
index 20050219c2f..83084411fa7 100644
--- a/doc/sphinx-guides/source/api/native-api.rst
+++ b/doc/sphinx-guides/source/api/native-api.rst
@@ -4514,7 +4514,7 @@ The MyData API is used to get a list of just the datasets, dataverses or datafil
 MyData API
 ~~~~~~~~~~
 
-List objects::
+A curl example listing objects
 
 .. code-block:: bash
 
@@ -4527,8 +4527,11 @@ List objects::
 
    curl "$SERVER_URL/api/mydata/retrieve?key=$API_TOKEN&role_ids=$ROLE_IDS&dvobject_types=$DVOBJECT_TYPES&published_states=$PUBLISHED_STATES&per_page=$PER_PAGE"
 
+Parameters:
+
 ``key`` is the user token, for this API it must not be passed in the header.
 ``role_ids`` User roles, several possible values among:
+
 - ``1`` = Admin
 - ``2`` = File Downloader
 - ``3`` = Dataverse + Dataset Creator
@@ -4537,9 +4540,10 @@ List objects::
 - ``6`` = Contributor
 - ``7`` = Curator
 - ``8`` = Member
-``dvobject_types`` Type of object, several possible values among: ``DataFile``, ``Dataset``& ``Dataverse``.
-``published_states`` State of the object, several possible values among:``Published``, ``Unpublished``, ``Draft``, ``Deaccessioned`` & ``In+Review``.
-``per_page`` Number of results returned per page.
+
+``dvobject_types`` Type of object, several possible values among: ``DataFile`` , ``Dataset`` & ``Dataverse`` .
+``published_states`` State of the object, several possible values among:``Published``, ``Unpublished``, ``Draft``, ``Deaccessioned`` & ``In+Review``. +``per_page`` Number of results returned per page. .. _send-feedback: From 0587a7a80f664254a2cb635501fcc34e7b736d41 Mon Sep 17 00:00:00 2001 From: sirineREKIK Date: Tue, 16 May 2023 15:47:41 +0200 Subject: [PATCH 0167/1525] change position api documentation --- doc/sphinx-guides/source/api/native-api.rst | 57 +++++++++++---------- 1 file changed, 30 insertions(+), 27 deletions(-) diff --git a/doc/sphinx-guides/source/api/native-api.rst b/doc/sphinx-guides/source/api/native-api.rst index 83084411fa7..2380a5142da 100644 --- a/doc/sphinx-guides/source/api/native-api.rst +++ b/doc/sphinx-guides/source/api/native-api.rst @@ -4506,6 +4506,33 @@ Please see :ref:`dataverse.api.signature-secret` for the configuration option to security. +.. _send-feedback: + +Send Feedback To Contact(s) +~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +This API call allows sending an email to the contacts for a collection, dataset, or datafile or to the support email address when no object is specified. +The call is protected by the normal /admin API protections (limited to localhost or requiring a separate key), but does not otherwise limit the sending of emails. +Administrators should be sure only trusted applications have access to avoid the potential for spam. + +The call is a POST with a JSON object as input with four keys: +- "targetId" - the id of the collection, dataset, or datafile. Persistent ids and collection aliases are not supported. (Optional) +- "subject" - the email subject line +- "body" - the email body to send +- "fromEmail" - the email to list in the reply-to field. (Dataverse always sends mail from the system email, but does it "on behalf of" and with a reply-to for the specified user.) + +A curl example using an ``ID`` + +.. code-block:: bash + + export SERVER_URL=http://localhost + export JSON='{"targetId":24, "subject":"Data Question", "body":"Please help me understand your data. Thank you!", "fromEmail":"dataverseSupport@mailinator.com"}' + + curl -X POST -H 'Content-Type:application/json' -d "$JSON" $SERVER_URL/api/admin/feedback + +Note that this call could be useful in coordinating with dataset authors (assuming they are also contacts) as an alternative/addition to the functionality provided by :ref:`return-a-dataset`. + + MyData ------ @@ -4541,33 +4568,9 @@ Parameters: - ``7`` = Curator - ``8`` = Member -``dvobject_types`` Type of object, several possible values among: ``DataFile``, ``Dataset``& ``Dataverse``. -``published_states`` State of the object, several possible values among:``Published``, ``Unpublished``, ``Draft``, ``Deaccessioned`` & ``In+Review``. -``per_page`` Number of results returned per page. - - -.. _send-feedback: - -Send Feedback To Contact(s) -~~~~~~~~~~~~~~~~~~~~~~~~~~~ - -This API call allows sending an email to the contacts for a collection, dataset, or datafile or to the support email address when no object is specified. -The call is protected by the normal /admin API protections (limited to localhost or requiring a separate key), but does not otherwise limit the sending of emails. -Administrators should be sure only trusted applications have access to avoid the potential for spam. - -The call is a POST with a JSON object as input with four keys: -- "targetId" - the id of the collection, dataset, or datafile. Persistent ids and collection aliases are not supported. 
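The send-feedback endpoint that this patch relocates can likewise be issued programmatically. Below is a minimal Java sketch, assuming the localhost-only /api/admin access described above; the targetId, subject, body, and fromEmail values are simply the placeholders from the documented curl example.

```java
import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;

public class SendFeedbackSketch {
    public static void main(String[] args) throws Exception {
        // Same payload shape as the documented curl example; all values are placeholders.
        String json = "{\"targetId\":24,"
                + " \"subject\":\"Data Question\","
                + " \"body\":\"Please help me understand your data. Thank you!\","
                + " \"fromEmail\":\"dataverseSupport@mailinator.com\"}";

        HttpRequest request = HttpRequest
                .newBuilder(URI.create("http://localhost:8080/api/admin/feedback"))
                .header("Content-Type", "application/json")
                .POST(HttpRequest.BodyPublishers.ofString(json))
                .build();

        // The /api/admin path is normally reachable only from localhost.
        HttpResponse<String> response = HttpClient.newHttpClient()
                .send(request, HttpResponse.BodyHandlers.ofString());
        System.out.println(response.body());
    }
}
```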
(Optional) -- "subject" - the email subject line -- "body" - the email body to send -- "fromEmail" - the email to list in the reply-to field. (Dataverse always sends mail from the system email, but does it "on behalf of" and with a reply-to for the specified user.) +``dvobject_types`` Type of object, several possible values among: ``DataFile`` , ``Dataset`` & ``Dataverse`` . -A curl example using an ``ID`` - -.. code-block:: bash +``published_states`` State of the object, several possible values among:``Published`` , ``Unpublished`` , ``Draft`` , ``Deaccessioned`` & ``In+Review`` . - export SERVER_URL=http://localhost - export JSON='{"targetId":24, "subject":"Data Question", "body":"Please help me understand your data. Thank you!", "fromEmail":"dataverseSupport@mailinator.com"}' - - curl -X POST -H 'Content-Type:application/json' -d "$JSON" $SERVER_URL/api/admin/feedback +``per_page`` Number of results returned per page. -Note that this call could be useful in coordinating with dataset authors (assuming they are also contacts) as an alternative/addition to the functionality provided by :ref:`return-a-dataset`. From 4db0d948dd3977f5c78a79243fd0bd8e23095d72 Mon Sep 17 00:00:00 2001 From: Eryk Kulikowski Date: Wed, 17 May 2023 13:10:49 +0200 Subject: [PATCH 0168/1525] fix for locking of the dataset for reindexing when unexpected exception is not caught --- .../java/edu/harvard/iq/dataverse/search/IndexServiceBean.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/search/IndexServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/search/IndexServiceBean.java index 5fb7dca79f7..4c8a0a24aef 100644 --- a/src/main/java/edu/harvard/iq/dataverse/search/IndexServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/search/IndexServiceBean.java @@ -413,7 +413,7 @@ public void asyncIndexDataset(Dataset dataset, boolean doNormalSolrDocCleanUp) { while (next != null) { try { indexDataset(next, doNormalSolrDocCleanUp); - } catch (SolrServerException | IOException e) { + } catch (Exception e) { // catch all possible exceptions; otherwise when something unexpected happes the dataset wold remain locked and impossible to reindex String failureLogText = "Indexing failed. 
You can kickoff a re-index of this dataset with: \r\n curl http://localhost:8080/api/admin/index/datasets/" + dataset.getId().toString(); failureLogText += "\r\n" + e.getLocalizedMessage(); LoggingUtil.writeOnSuccessFailureLog(null, failureLogText, dataset); From 07c70d8933d550eb01f3bd2cfb3626b9a2d94f8c Mon Sep 17 00:00:00 2001 From: Jim Myers Date: Tue, 6 Dec 2022 13:01:01 -0500 Subject: [PATCH 0169/1525] flyway update script --- .../resources/db/migration/V5.13.0.3__guestbook-on-request.sql | 1 + 1 file changed, 1 insertion(+) create mode 100644 src/main/resources/db/migration/V5.13.0.3__guestbook-on-request.sql diff --git a/src/main/resources/db/migration/V5.13.0.3__guestbook-on-request.sql b/src/main/resources/db/migration/V5.13.0.3__guestbook-on-request.sql new file mode 100644 index 00000000000..1ffc87dfa32 --- /dev/null +++ b/src/main/resources/db/migration/V5.13.0.3__guestbook-on-request.sql @@ -0,0 +1 @@ +ALTER TABLE fileaccessrequests ADD COLUMN IF NOT EXISTS request_state VARCHAR(64); From c8726958e45d6e25bc4ca773c55ce58afc1bf431 Mon Sep 17 00:00:00 2001 From: Jim Myers Date: Tue, 6 Dec 2022 13:56:33 -0500 Subject: [PATCH 0170/1525] fix duplicate action --- src/main/webapp/filesFragment.xhtml | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/src/main/webapp/filesFragment.xhtml b/src/main/webapp/filesFragment.xhtml index 6122b86b274..7395998042f 100644 --- a/src/main/webapp/filesFragment.xhtml +++ b/src/main/webapp/filesFragment.xhtml @@ -483,7 +483,8 @@ styleClass="btn btn-default btn-request" action="#{DatasetPage.requestAccessMultipleFiles()}" update="@form, @([id$=messagePanel])" - disabled="#{DatasetPage.locked}"> + disabled="#{DatasetPage.locked or !DatasetPage.fileAccessRequestMultiButtonEnabled}"> + #{bundle['file.requestAccess']} Date: Tue, 6 Dec 2022 13:57:13 -0500 Subject: [PATCH 0171/1525] add comment to check possible unused method --- .../edu/harvard/iq/dataverse/DatasetPage.java | 62 +++++++++++++++++++ 1 file changed, 62 insertions(+) diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java b/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java index 9294620d790..33b598083da 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java @@ -3209,6 +3209,68 @@ private void updateGuestbookResponse (boolean guestbookRequired, boolean downloa } + /*helper function to filter the selected files into , + and and for reuse*/ + + private boolean filterSelectedFiles(){ + setSelectedDownloadableFiles(new ArrayList<>()); + setSelectedNonDownloadableFiles(new ArrayList<>()); + setSelectedRestrictedFiles(new ArrayList<>()); + setSelectedUnrestrictedFiles(new ArrayList<>()); + + boolean someFiles = false; + for (FileMetadata fmd : this.selectedFiles){ + if(this.fileDownloadHelper.canDownloadFile(fmd)){ + getSelectedDownloadableFiles().add(fmd); + someFiles=true; + } else { + getSelectedNonDownloadableFiles().add(fmd); + } + if(fmd.isRestricted()){ + getSelectedRestrictedFiles().add(fmd); //might be downloadable to user or not + someFiles=true; + } else { + getSelectedUnrestrictedFiles().add(fmd); + someFiles=true; + } + + } + return someFiles; + } +//QDRADA - still needed? + public void validateFilesForRequestAccess(){ + this.filterSelectedFiles(); + + if(!dataset.isFileAccessRequest()){ //is this needed? 
wouldn't be able to click Request Access if this !isFileAccessRequest() + return; + } + + if(!this.selectedRestrictedFiles.isEmpty()){ + ArrayList nonDownloadableRestrictedFiles = new ArrayList<>(); + + List userRequestedDataFiles = ((AuthenticatedUser) session.getUser()).getRequestedDataFiles(); + + for(FileMetadata fmd : this.selectedRestrictedFiles){ + if(!this.fileDownloadHelper.canDownloadFile(fmd) && !userRequestedDataFiles.contains(fmd.getDataFile())){ + nonDownloadableRestrictedFiles.add(fmd); + } + } + + if(!nonDownloadableRestrictedFiles.isEmpty()){ + guestbookResponse.setDataFile(null); + guestbookResponse.setSelectedFileIds(this.getFilesIdsString(nonDownloadableRestrictedFiles)); + + if(this.isGuestbookAndTermsPopupRequired()){ //need to pop up the guestbook and terms dialog + PrimeFaces.current().executeScript("PF('guestbookAndTermsPopup').show();handleResizeDialog('guestbookAndTermsPopup');"); + } else { + this.requestAccessMultipleFiles(); + } + } else { + //popup select data files + } + } + } + private boolean selectAllFiles; public boolean isSelectAllFiles() { From 76b3b181d449336a9659bf5dc54637be90ff8679 Mon Sep 17 00:00:00 2001 From: Jim Myers Date: Tue, 6 Dec 2022 13:57:30 -0500 Subject: [PATCH 0172/1525] debug logging --- .../iq/dataverse/engine/command/impl/RequestAccessCommand.java | 2 ++ 1 file changed, 2 insertions(+) diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/RequestAccessCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/RequestAccessCommand.java index b87b9a73aa5..df7c7367f2d 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/RequestAccessCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/RequestAccessCommand.java @@ -52,6 +52,8 @@ public DataFile execute(CommandContext ctxt) throws CommandException { } //if user already has permission to download file or the file is public throw command exception + logger.info("User: " + this.getRequest().getAuthenticatedUser().getName()); + logger.info("File: " + file.getId()); if (!file.isRestricted() || ctxt.permissions().requestOn(this.getRequest(), file).has(Permission.DownloadFile)) { throw new CommandException(BundleUtil.getStringFromBundle("file.requestAccess.notAllowed.alreadyHasDownloadPermisssion"), this); } From bd603ec7a803542460eea3a66600572bdf85f57a Mon Sep 17 00:00:00 2001 From: Jim Myers Date: Tue, 6 Dec 2022 14:17:45 -0500 Subject: [PATCH 0173/1525] more debug --- .../command/impl/RequestAccessCommand.java | 16 +++++++++++++++- 1 file changed, 15 insertions(+), 1 deletion(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/RequestAccessCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/RequestAccessCommand.java index df7c7367f2d..d710ed66551 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/RequestAccessCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/RequestAccessCommand.java @@ -44,6 +44,19 @@ public RequestAccessCommand(DataverseRequest dvRequest, DataFile file, Boolean s this.sendNotification = sendNotification; } + + public RequestAccessCommand(DataverseRequest dvRequest, DataFile file, GuestbookResponse gbr) { + this(dvRequest, file, gbr, false); + } + + public RequestAccessCommand(DataverseRequest dvRequest, DataFile file, GuestbookResponse gbr, Boolean sendNotification) { + // for data file check permission on owning dataset + super(dvRequest, file); + this.file = file; + this.requester = (AuthenticatedUser) 
dvRequest.getUser(); + this.fileAccessRequest = new FileAccessRequest(file,requester,gbr); + this.sendNotification = sendNotification; + } @Override public DataFile execute(CommandContext ctxt) throws CommandException { @@ -53,7 +66,8 @@ public DataFile execute(CommandContext ctxt) throws CommandException { //if user already has permission to download file or the file is public throw command exception logger.info("User: " + this.getRequest().getAuthenticatedUser().getName()); - logger.info("File: " + file.getId()); + logger.info("File: " + file.getId() + " : restricted?: " + file.isRestricted()); + logger.info("permission?: " + ctxt.permissions().requestOn(this.getRequest(), file).has(Permission.DownloadFile)); if (!file.isRestricted() || ctxt.permissions().requestOn(this.getRequest(), file).has(Permission.DownloadFile)) { throw new CommandException(BundleUtil.getStringFromBundle("file.requestAccess.notAllowed.alreadyHasDownloadPermisssion"), this); } From 5e29a0600b6d4fe59d89191897bc61cd45d78494 Mon Sep 17 00:00:00 2001 From: Jim Myers Date: Tue, 6 Dec 2022 10:20:39 -0500 Subject: [PATCH 0174/1525] more debug --- .../edu/harvard/iq/dataverse/DataFile.java | 11 + .../iq/dataverse/DataFileServiceBean.java | 53 ++++ .../edu/harvard/iq/dataverse/Dataset.java | 6 + .../edu/harvard/iq/dataverse/DatasetPage.java | 134 +++++++-- .../iq/dataverse/FileAccessRequest.java | 262 ++++++++++++++---- .../FileAccessRequestServiceBean.java | 89 ++++++ .../iq/dataverse/FileDownloadHelper.java | 30 +- .../iq/dataverse/FileDownloadServiceBean.java | 102 ++++++- .../edu/harvard/iq/dataverse/FilePage.java | 6 + .../iq/dataverse/GuestbookResponse.java | 13 +- .../iq/dataverse/ManagePermissionsPage.java | 47 ++++ .../iq/dataverse/UserNotification.java | 2 +- .../UserNotificationServiceBean.java | 22 ++ .../users/AuthenticatedUser.java | 25 ++ .../command/impl/RequestAccessCommand.java | 33 ++- .../harvard/iq/dataverse/util/FileUtil.java | 66 +++++ .../harvard/iq/dataverse/util/MailUtil.java | 2 + src/main/webapp/dataset.xhtml | 24 +- .../file-download-button-fragment.xhtml | 74 ++--- src/main/webapp/file.xhtml | 40 ++- src/main/webapp/filesFragment.xhtml | 9 +- ...l => guestbook-terms-popup-fragment.xhtml} | 17 ++ .../iq/dataverse/util/FileUtilTest.java | 2 +- 23 files changed, 896 insertions(+), 173 deletions(-) create mode 100644 src/main/java/edu/harvard/iq/dataverse/FileAccessRequestServiceBean.java rename src/main/webapp/{file-request-access-popup-fragment.xhtml => guestbook-terms-popup-fragment.xhtml} (65%) diff --git a/src/main/java/edu/harvard/iq/dataverse/DataFile.java b/src/main/java/edu/harvard/iq/dataverse/DataFile.java index 28d814d9844..c43800c57ce 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DataFile.java +++ b/src/main/java/edu/harvard/iq/dataverse/DataFile.java @@ -200,6 +200,17 @@ public String toString() { @OneToMany(mappedBy="dataFile", cascade={CascadeType.REMOVE, CascadeType.MERGE, CascadeType.PERSIST}) private List guestbookResponses; + @OneToMany(mappedBy="dataFile",fetch = FetchType.LAZY,cascade={CascadeType.REMOVE, CascadeType.MERGE, CascadeType.PERSIST, CascadeType.REFRESH}) + private List fileAccessRequests; + + public List getFileAccessRequests(){ + return fileAccessRequests; + } + + public void setFileAccessRequests(List fARs){ + this.fileAccessRequests = fARs; + } + public List getGuestbookResponses() { return guestbookResponses; } diff --git a/src/main/java/edu/harvard/iq/dataverse/DataFileServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DataFileServiceBean.java 
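The DataFile hunk above adds one side of a bidirectional JPA association: a `fileAccessRequests` collection mapped by a `dataFile` field on the new `FileAccessRequest` entity that appears later in this patch. As a general illustration of that mapping shape, here is a self-contained sketch with invented entity names (SketchFile, SketchRequest); it is not the actual Dataverse code.

```java
import java.util.ArrayList;
import java.util.List;
import javax.persistence.Entity;
import javax.persistence.GeneratedValue;
import javax.persistence.GenerationType;
import javax.persistence.Id;
import javax.persistence.JoinColumn;
import javax.persistence.ManyToOne;
import javax.persistence.OneToMany;

// Toy inverse side: one "file" has many "requests" (names invented for the sketch).
@Entity
class SketchFile {
    @Id @GeneratedValue(strategy = GenerationType.IDENTITY)
    Long id;

    // mappedBy points at the owning field on SketchRequest.
    @OneToMany(mappedBy = "file")
    List<SketchRequest> requests = new ArrayList<>();

    // Convenience helper: keep both in-memory sides consistent.
    void addRequest(SketchRequest r) {
        r.file = this;
        requests.add(r);
    }
}

@Entity
class SketchRequest {
    @Id @GeneratedValue(strategy = GenerationType.IDENTITY)
    Long id;

    // The foreign key column lives on this, the owning, side.
    @ManyToOne
    @JoinColumn(nullable = false)
    SketchFile file;
}
```

JPA persists only the owning `@ManyToOne` side; the collection on the inverse side is an in-memory view, which is why the `RequestAccessCommand` changes further down add each new request to both the file's and the requester's lists explicitly.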
index 196f84b6877..449e8d351c6 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DataFileServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/DataFileServiceBean.java @@ -154,6 +154,27 @@ public DataFile find(Object pk) { }*/ + public List findAll(List fileIds){ + List dataFiles = new ArrayList<>(); + + for (Long fileId : fileIds){ + dataFiles.add(find(fileId)); + } + + return dataFiles; + } + + public List findAll(String fileIdsAsString){ + ArrayList dataFileIds = new ArrayList<>(); + + String[] fileIds = fileIdsAsString.split(","); + for (String fId : fileIds){ + dataFileIds.add(Long.parseLong(fId)); + } + + return findAll(dataFileIds); + } + public DataFile findByGlobalId(String globalId) { return (DataFile) dvObjectService.findByGlobalId(globalId, DvObject.DType.DataFile); } @@ -350,6 +371,18 @@ public FileMetadata findMostRecentVersionFileIsIn(DataFile file) { return fileMetadatas.get(0); } } + + public List findAllCheapAndEasy(String fileIdsAsString){ + //assumption is that the fileIds are separated by ',' + ArrayList dataFilesFound = new ArrayList<>(); + String[] fileIds = fileIdsAsString.split(","); + DataFile df = this.findCheapAndEasy(Long.parseLong(fileIds[0])); + if(df != null){ + dataFilesFound.add(df); + } + + return dataFilesFound; + } public DataFile findCheapAndEasy(Long id) { DataFile dataFile; @@ -802,6 +835,7 @@ public void findFileMetadataOptimizedExperimental(Dataset owner, DatasetVersion dataFile.addFileAccessRequester(au); } + dataFile.setFileAccessRequesters(retrieveFileAccessRequesters(dataFile)); dataFiles.add(dataFile); filesMap.put(dataFile.getId(), i++); } @@ -821,6 +855,25 @@ public void findFileMetadataOptimizedExperimental(Dataset owner, DatasetVersion owner.setFiles(dataFiles); } + private List retrieveFileAccessRequesters(DataFile fileIn) { + List retList = new ArrayList<>(); + + // List requesters = em.createNativeQuery("select authenticated_user_id + // from fileaccessrequests where datafile_id = + // "+fileIn.getId()).getResultList(); + List requesters = em.createNativeQuery("select authenticated_user_id from fileaccessrequests where datafile_id = " + fileIn.getId() + " and request_state='CREATED'").getResultList(); + + for (Object userIdObj : requesters) { + Long userId = (Long) userIdObj; + AuthenticatedUser user = userService.find(userId); + if (user != null) { + retList.add(user); + } + } + + return retList; + } + private List retrieveFileMetadataForVersion(Dataset dataset, DatasetVersion version, List dataFiles, Map filesMap, Map categoryMap) { List retList = new ArrayList<>(); Map> categoryMetaMap = new HashMap<>(); diff --git a/src/main/java/edu/harvard/iq/dataverse/Dataset.java b/src/main/java/edu/harvard/iq/dataverse/Dataset.java index 683b6687c8b..305e9a404e7 100644 --- a/src/main/java/edu/harvard/iq/dataverse/Dataset.java +++ b/src/main/java/edu/harvard/iq/dataverse/Dataset.java @@ -858,6 +858,12 @@ public String getHarvestingDescription() { return null; } + public boolean hasEnabledGuestbook(){ + Guestbook gb = this.getGuestbook(); + + return ( gb != null && gb.isEnabled()); + } + @Override public boolean equals(Object object) { // TODO: Warning - this method won't work in the case the id fields are not set diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java b/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java index 33b598083da..8754c26a3b6 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java @@ -366,6 +366,19 @@ public void 
setShowIngestSuccess(boolean showIngestSuccess) { this.showIngestSuccess = showIngestSuccess; } + private String termsGuestbookPopupAction = ""; + + public void setTermsGuestbookPopupAction(String popupAction){ + if(popupAction != null && popupAction.length() > 0){ + this.termsGuestbookPopupAction = popupAction; + } + + } + + public String getTermsGuestbookPopupAction(){ + return termsGuestbookPopupAction; + } + // TODO: Consider renaming "configureTools" to "fileConfigureTools". List configureTools = new ArrayList<>(); // TODO: Consider renaming "exploreTools" to "fileExploreTools". @@ -3137,9 +3150,14 @@ public void setValidateFilesOutcome(String validateFilesOutcome) { this.validateFilesOutcome = validateFilesOutcome; } - public boolean validateFilesForDownload(boolean guestbookRequired, boolean downloadOriginal) { - setSelectedDownloadableFiles(new ArrayList<>()); - setSelectedNonDownloadableFiles(new ArrayList<>()); + public boolean validateFilesForDownload(boolean downloadOriginal){ + if (this.selectedFiles.isEmpty()) { + PrimeFaces.current().executeScript("PF('selectFilesForDownload').show()"); + return false; + } else { + this.filterSelectedFiles(); + } + //assume Pass unless something bad happens setValidateFilesOutcome("Pass"); Long bytes = (long) 0; @@ -3170,6 +3188,13 @@ public boolean validateFilesForDownload(boolean guestbookRequired, boolean downl return false; } + +//QDRADA handle new state from + /*if (isTermsPopupRequired() || isGuestbookPopupRequiredAtDownload()){ + PrimeFaces.current().executeScript("PF('guestbookAndTermsPopup').show();handleResizeDialog('guestbookAndTermsPopup');"); + } + */ + // If some of the files were restricted and we had to drop them off the // list, and NONE of the files are left on the downloadable list // - we show them a "you're out of luck" popup: @@ -3183,10 +3208,11 @@ public boolean validateFilesForDownload(boolean guestbookRequired, boolean downl return true; } - if (guestbookRequired) { + //QDRADA - still needed? +/* if (guestbookRequired) { setValidateFilesOutcome("GuestbookRequired"); } - +*/ return true; } @@ -3208,6 +3234,67 @@ private void updateGuestbookResponse (boolean guestbookRequired, boolean downloa guestbookResponse.setDownloadtype("Download"); } + /*helper function to filter the selected files into , + and and for reuse*/ + + private boolean filterSelectedFiles(){ + setSelectedDownloadableFiles(new ArrayList<>()); + setSelectedNonDownloadableFiles(new ArrayList<>()); + setSelectedRestrictedFiles(new ArrayList<>()); + setSelectedUnrestrictedFiles(new ArrayList<>()); + + boolean someFiles = false; + for (FileMetadata fmd : this.selectedFiles){ + if(this.fileDownloadHelper.canDownloadFile(fmd)){ + getSelectedDownloadableFiles().add(fmd); + someFiles=true; + } else { + getSelectedNonDownloadableFiles().add(fmd); + } + if(fmd.isRestricted()){ + getSelectedRestrictedFiles().add(fmd); //might be downloadable to user or not + someFiles=true; + } else { + getSelectedUnrestrictedFiles().add(fmd); + someFiles=true; + } + + } + return someFiles; + } + + public void validateFilesForRequestAccess(){ + this.filterSelectedFiles(); + + if(!dataset.isFileAccessRequest()){ //is this needed? 
wouldn't be able to click Request Access if this !isFileAccessRequest() + return; + } + + if(!this.selectedRestrictedFiles.isEmpty()){ + ArrayList nonDownloadableRestrictedFiles = new ArrayList<>(); + + List userRequestedDataFiles = ((AuthenticatedUser) session.getUser()).getRequestedDataFiles(); + + for(FileMetadata fmd : this.selectedRestrictedFiles){ + if(!this.fileDownloadHelper.canDownloadFile(fmd) && !userRequestedDataFiles.contains(fmd.getDataFile())){ + nonDownloadableRestrictedFiles.add(fmd); + } + } + + if(!nonDownloadableRestrictedFiles.isEmpty()){ + guestbookResponse.setDataFile(null); + guestbookResponse.setSelectedFileIds(this.getFilesIdsString(nonDownloadableRestrictedFiles)); + + if(this.isGuestbookAndTermsPopupRequired()){ //need to pop up the guestbook and terms dialog + PrimeFaces.current().executeScript("PF('guestbookAndTermsPopup').show();handleResizeDialog('guestbookAndTermsPopup');"); + } else { + this.requestAccessMultipleFiles(); + } + } else { + //popup select data files + } + } + } /*helper function to filter the selected files into , and and for reuse*/ @@ -3295,26 +3382,23 @@ public void toggleAllSelected(){ // helper Method public String getSelectedFilesIdsString() { - String downloadIdString = ""; - for (FileMetadata fmd : this.selectedFiles){ - if (!StringUtil.isEmpty(downloadIdString)) { - downloadIdString += ","; - } - downloadIdString += fmd.getDataFile().getId(); - } - return downloadIdString; + return this.getFilesIdsString(this.selectedFiles); } - + // helper Method public String getSelectedDownloadableFilesIdsString() { - String downloadIdString = ""; - for (FileMetadata fmd : this.selectedDownloadableFiles){ - if (!StringUtil.isEmpty(downloadIdString)) { - downloadIdString += ","; + return this.getFilesIdsString(this.selectedDownloadableFiles); + } + + public String getFilesIdsString(List fileMetadatas){ //for reuse + String idString = ""; + for (FileMetadata fmd : fileMetadatas){ + if (!StringUtil.isEmpty(idString)) { + idString += ","; } - downloadIdString += fmd.getDataFile().getId(); + idString += fmd.getDataFile().getId(); } - return downloadIdString; + return idString; } @@ -5221,6 +5305,10 @@ public boolean isDownloadPopupRequired() { public boolean isRequestAccessPopupRequired() { return FileUtil.isRequestAccessPopupRequired(workingVersion); } + + public boolean isGuestbookAndTermsPopupRequired() { + return FileUtil.isGuestbookAndTermsPopupRequired(workingVersion); + } public String requestAccessMultipleFiles() { @@ -5236,11 +5324,11 @@ public String requestAccessMultipleFiles() { for (FileMetadata fmd : selectedFiles){ fileDownloadHelper.addMultipleFilesForRequestAccess(fmd.getDataFile()); } - if (isRequestAccessPopupRequired()) { + if (isGuestbookAndTermsPopupRequired()) { //RequestContext requestContext = RequestContext.getCurrentInstance(); - PrimeFaces.current().executeScript("PF('requestAccessPopup').show()"); + PrimeFaces.current().executeScript("PF('guestbookAndTermsPopup').show()"); //the popup will call writeGuestbookAndRequestAccess(); return ""; - } else { + }else { //No popup required fileDownloadHelper.requestAccessIndirect(); return ""; diff --git a/src/main/java/edu/harvard/iq/dataverse/FileAccessRequest.java b/src/main/java/edu/harvard/iq/dataverse/FileAccessRequest.java index 76c5df4409a..723a54c8587 100644 --- a/src/main/java/edu/harvard/iq/dataverse/FileAccessRequest.java +++ b/src/main/java/edu/harvard/iq/dataverse/FileAccessRequest.java @@ -1,91 +1,237 @@ package edu.harvard.iq.dataverse; -import 
edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser; - -import javax.persistence.Column; -import javax.persistence.Embeddable; -import javax.persistence.EmbeddedId; +import java.io.Serializable; +import javax.persistence.CascadeType; +import javax.persistence.GeneratedValue; +import javax.persistence.UniqueConstraint; import javax.persistence.Entity; +import javax.persistence.Table; +import javax.persistence.Index; +import javax.persistence.Id; import javax.persistence.JoinColumn; +import javax.persistence.JoinTable; import javax.persistence.ManyToOne; -import javax.persistence.MapsId; -import javax.persistence.Table; -import javax.persistence.Temporal; -import javax.persistence.TemporalType; -import java.io.Serializable; -import java.util.Date; +import javax.persistence.OneToOne; +import javax.persistence.EnumType; +import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser; +import edu.harvard.iq.dataverse.datavariable.DataVariable; +import javax.persistence.Column; +import javax.persistence.Enumerated; +import javax.persistence.GenerationType; +import javax.persistence.NamedQueries; +import javax.persistence.NamedQuery; + +/** + * + * @author Marina + */ @Entity -@Table(name = "fileaccessrequests") -public class FileAccessRequest { - @EmbeddedId - private FileAccessRequestKey id; +@Table(name = "fileaccessrequests", //having added the guestbookresponse_id column to fileaccessrequests + uniqueConstraints=@UniqueConstraint(columnNames={"datafile_id", "authenticated_user_id","request_state"}) //this may not make sense at some future point +) + +@NamedQueries({ + @NamedQuery(name = "FileAccessRequest.findByAuthenticatedUserId", + query = "SELECT far FROM FileAccessRequest far WHERE far.user.id=:authenticatedUserId"), + @NamedQuery(name = "FileAccessRequest.findByGuestbookResponseId", + query = "SELECT far FROM FileAccessRequest far WHERE far.guestbookResponse.id=:guestbookResponseId"), + @NamedQuery(name = "FileAccessRequest.findByDataFileId", + query = "SELECT far FROM FileAccessRequest far WHERE far.dataFile.id=:dataFileId"), + @NamedQuery(name = "FileAccessRequest.findByRequestState", + query = "SELECT far FROM FileAccessRequest far WHERE far.requestState=:requestState"), + @NamedQuery(name = "FileAccessRequest.findByAuthenticatedUserIdAndRequestState", + query = "SELECT far FROM FileAccessRequest far WHERE far.user.id=:authenticatedUserId and far.requestState=:requestState"), + @NamedQuery(name = "FileAccessRequest.findByGuestbookResponseIdAndRequestState", + query = "SELECT far FROM FileAccessRequest far WHERE far.guestbookResponse.id=:guestbookResponseId and far.requestState=:requestState"), + @NamedQuery(name = "FileAccessRequest.findByDataFileIdAndRequestState", + query = "SELECT far FROM FileAccessRequest far WHERE far.dataFile.id=:dataFileId and far.requestState=:requestState"), + @NamedQuery(name = "FileAccessRequest.findByAuthenticatedUserIdAndDataFileIdAndRequestState", + query = "SELECT far FROM FileAccessRequest far WHERE far.user.id=:authenticatedUserId and far.dataFile.id=:dataFileId and far.requestState=:requestState") +}) + +public class FileAccessRequest implements Serializable{ + private static final long serialVersionUID = 1L; + @Id + @GeneratedValue(strategy = GenerationType.IDENTITY) + private Long id; + @ManyToOne - @MapsId("dataFile") - @JoinColumn(name = "datafile_id") + @JoinColumn(nullable=false) private DataFile dataFile; + @ManyToOne - @MapsId("authenticatedUser") - @JoinColumn(name = "authenticated_user_id") - private AuthenticatedUser 
authenticatedUser; - - @Temporal(value = TemporalType.TIMESTAMP) - @Column(name = "creation_time") - private Date creationTime; - - public FileAccessRequestKey getId() { + @JoinColumn(name="authenticated_user_id",nullable=false) + private AuthenticatedUser user; + + @OneToOne + @JoinColumn(nullable=true) + private GuestbookResponse guestbookResponse; + + public enum RequestState {CREATED,EDITED,GRANTED,REJECTED,RESUBMIT,INVALIDATED,CLOSED}; + //private RequestState state; + @Enumerated(EnumType.STRING) + @Column(name="request_state", nullable=false ) + private RequestState requestState; + + public FileAccessRequest(){ + + } + + public FileAccessRequest(DataFile df, AuthenticatedUser au){ + setDataFile(df); + setRequester(au); + setState(RequestState.CREATED); + } + + public FileAccessRequest(DataFile df, AuthenticatedUser au, GuestbookResponse gbr){ + setDataFile(df); + setRequester(au); + setGuestbookResponse(gbr); + setState(RequestState.CREATED); + } + + public Long getId() { return id; } - public void setId(FileAccessRequestKey id) { + public void setId(Long id) { this.id = id; } - - public DataFile getDataFile() { + + public DataFile getDataFile(){ return dataFile; } - - public void setDataFile(DataFile dataFile) { - this.dataFile = dataFile; + + public final void setDataFile(DataFile df){ + this.dataFile = df; + } + + public AuthenticatedUser getRequester(){ + return user; + } + + public final void setRequester(AuthenticatedUser au){ + this.user = au; + } + + public GuestbookResponse getGuestbookResponse(){ + return guestbookResponse; + } + + public final void setGuestbookResponse(GuestbookResponse gbr){ + this.guestbookResponse = gbr; + } + + public RequestState getState() { + return this.requestState; + } + + public void setState(RequestState requestState) { + this.requestState = requestState; + } + + public String getStateLabel() { + if(isStateCreated()){ + return "created"; + } + if(isStateEdited()) { + return "edited"; + } + if(isStateGranted()) { + return "granted"; + } + if(isStateRejected()) { + return "rejected"; + } + if(isStateResubmit()) { + return "resubmit"; + } + if(isStateInvalidated()) { + return "invalidated"; + } + if(isStateClosed()) { + return "closed"; + } + return null; + } + + public void setStateCreated() { + this.requestState = RequestState.CREATED; + } + + public void setStateEdited() { + this.requestState = RequestState.EDITED; + } + + public void setStateGranted() { + this.requestState = RequestState.GRANTED; } - public AuthenticatedUser getAuthenticatedUser() { - return authenticatedUser; + public void setStateRejected() { + this.requestState = RequestState.REJECTED; } - public void setAuthenticatedUser(AuthenticatedUser authenticatedUser) { - this.authenticatedUser = authenticatedUser; + public void setStateResubmit() { + this.requestState = RequestState.RESUBMIT; + } + + public void setStateInvalidated() { + this.requestState = RequestState.INVALIDATED; } - public Date getCreationTime() { - return creationTime; + public void setStateClosed() { + this.requestState = RequestState.CLOSED; } - public void setCreationTime(Date creationTime) { - this.creationTime = creationTime; + + public boolean isStateCreated() { + return this.requestState == RequestState.CREATED; + } + + public boolean isStateEdited() { + return this.requestState == RequestState.EDITED; + } + + public boolean isStateGranted() { + return this.requestState == RequestState.GRANTED; } - @Embeddable - public static class FileAccessRequestKey implements Serializable { - @Column(name = 
"datafile_id") - private Long dataFile; - @Column(name = "authenticated_user_id") - private Long authenticatedUser; + public boolean isStateRejected() { + return this.requestState == RequestState.REJECTED; + } - public Long getDataFile() { - return dataFile; - } + public boolean isStateResubmit() { + return this.requestState == RequestState.RESUBMIT; + } + + public boolean isStateInvalidated() { + return this.requestState == RequestState.INVALIDATED; + } - public void setDataFile(Long dataFile) { - this.dataFile = dataFile; - } + public boolean isStateClosed() { + return this.requestState == RequestState.CLOSED; + } + + @Override + public int hashCode() { + int hash = 0; + hash += (id != null ? id.hashCode() : 0); + return hash; + } - public Long getAuthenticatedUser() { - return authenticatedUser; + @Override + public boolean equals(Object object) { + // TODO: Warning - this method won't work in the case the id fields are not set + if (!(object instanceof FileAccessRequest)) { + return false; } - - public void setAuthenticatedUser(Long authenticatedUser) { - this.authenticatedUser = authenticatedUser; + FileAccessRequest other = (FileAccessRequest) object; + if ((this.id == null && other.id != null) || (this.id != null && !this.id.equals(other.id))) { + return false; } + return true; } -} + + +} \ No newline at end of file diff --git a/src/main/java/edu/harvard/iq/dataverse/FileAccessRequestServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/FileAccessRequestServiceBean.java new file mode 100644 index 00000000000..215e4695a75 --- /dev/null +++ b/src/main/java/edu/harvard/iq/dataverse/FileAccessRequestServiceBean.java @@ -0,0 +1,89 @@ +package edu.harvard.iq.dataverse; + +import java.util.List; +import javax.ejb.Stateless; +import javax.inject.Named; +import javax.persistence.EntityManager; +import javax.persistence.PersistenceContext; + +import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser; + +/** + * + * @author Marina + */ +@Stateless +@Named +public class FileAccessRequestServiceBean { + + @PersistenceContext(unitName = "VDCNet-ejbPU") + private EntityManager em; + + public FileAccessRequest find(Object pk) { + return em.find(FileAccessRequest.class, pk); + } + + public List findAll() { + return em.createQuery("select object(o) from FileAccessRequest as o order by o.id", FileAccessRequest.class).getResultList(); + } + + public List findAll(Long authenticatedUserId, Long fileId, FileAccessRequest.RequestState requestState){ + return em.createNamedQuery("FileAccessRequest.findByAuthenticatedUserIdAndDataFileIdAndRequestState", FileAccessRequest.class) + .setParameter("authenticatedUserId",authenticatedUserId) + .setParameter("dataFileId",fileId) + .setParameter("requestState",requestState) + .getResultList(); + } + + public List findAllByAuthenticedUserId(Long authenticatedUserId){ + return em.createNamedQuery("FileAccessRequest.findByAuthenticatedUserId", FileAccessRequest.class) + .setParameter("authenticatedUserId", authenticatedUserId) + .getResultList(); + } + + public List findAllByGuestbookResponseId(Long guestbookResponseId){ + return em.createNamedQuery("FileAccessRequest.findByGuestbookResponseId", FileAccessRequest.class) + .setParameter("guestbookResponseId", guestbookResponseId) + .getResultList(); + + } + + public List findAllByDataFileId(Long dataFileId){ + return em.createNamedQuery("FileAccessRequest.findByDataFileId", FileAccessRequest.class) + .setParameter("dataFileId", dataFileId) + .getResultList(); + } + + public List 
findAllByAuthenticatedUserIdAndRequestState(Long authenticatedUserId, FileAccessRequest.RequestState requestState){ + return em.createNamedQuery("FileAccessRequest.findByAuthenticatedUserIdAndRequestState", FileAccessRequest.class) + .setParameter("authenticatedUserId", authenticatedUserId) + .setParameter("requestState",requestState) + .getResultList(); + } + + public List findAllByGuestbookResponseIdAndRequestState(Long guestbookResponseId, FileAccessRequest.RequestState requestState){ + return em.createNamedQuery("FileAccessRequest.findByGuestbookResponseIdAndRequestState", FileAccessRequest.class) + .setParameter("dataFileId", guestbookResponseId) + .setParameter("requestState",requestState) + .getResultList(); + } + + public List findAllByDataFileIdAndRequestState(Long dataFileId, FileAccessRequest.RequestState requestState){ + return em.createNamedQuery("FileAccessRequest.findByDataFileIdAndRequestState", FileAccessRequest.class) + .setParameter("dataFileId", dataFileId) + .setParameter("requestState",requestState) + .getResultList(); + } + + + public FileAccessRequest save(FileAccessRequest far) { + if (far.getId() == null) { + em.persist(far); + return far; + } else { + return em.merge(far); + } + } + + +} diff --git a/src/main/java/edu/harvard/iq/dataverse/FileDownloadHelper.java b/src/main/java/edu/harvard/iq/dataverse/FileDownloadHelper.java index 850efc2f1ae..e44aeafcc4d 100644 --- a/src/main/java/edu/harvard/iq/dataverse/FileDownloadHelper.java +++ b/src/main/java/edu/harvard/iq/dataverse/FileDownloadHelper.java @@ -71,7 +71,7 @@ private boolean testResponseLength(String value) { // file downloads and multiple (batch) downloads - sice both use the same // terms/etc. popup. public void writeGuestbookAndStartDownload(GuestbookResponse guestbookResponse) { - PrimeFaces.current().executeScript("PF('downloadPopup').hide()"); + PrimeFaces.current().executeScript("PF('guestbookAndTermsPopup').hide()"); guestbookResponse.setDownloadtype("Download"); // Note that this method is only ever called from the file-download-popup - // meaning we know for the fact that we DO want to save this @@ -91,7 +91,7 @@ public void writeGuestbookAndStartDownload(GuestbookResponse guestbookResponse) public void writeGuestbookAndOpenSubset(GuestbookResponse guestbookResponse) { - PrimeFaces.current().executeScript("PF('downloadPopup').hide()"); + PrimeFaces.current().executeScript("PF('guestbookAndTermsPopup').hide()"); PrimeFaces.current().executeScript("PF('downloadDataSubsetPopup').show()"); guestbookResponse.setDownloadtype("Subset"); fileDownloadService.writeGuestbookResponseRecord(guestbookResponse); @@ -132,17 +132,23 @@ public void writeGuestbookAndLaunchExploreTool(GuestbookResponse guestbookRespon fileDownloadService.explore(guestbookResponse, fmd, externalTool); //requestContext.execute("PF('downloadPopup').hide()"); - PrimeFaces.current().executeScript("PF('downloadPopup').hide()"); + PrimeFaces.current().executeScript("PF('guestbookAndTermsPopup').hide()"); } public void writeGuestbookAndLaunchPackagePopup(GuestbookResponse guestbookResponse) { - PrimeFaces.current().executeScript("PF('downloadPopup').hide()"); + PrimeFaces.current().executeScript("PF('guestbookAndTermsPopup').hide()"); PrimeFaces.current().executeScript("PF('downloadPackagePopup').show()"); PrimeFaces.current().executeScript("handleResizeDialog('downloadPackagePopup')"); fileDownloadService.writeGuestbookResponseRecord(guestbookResponse); } + public void writeGuestbookResponseAndRequestAccess(GuestbookResponse 
guestbookResponse) { + //requestContext.execute("PF('guestbookAndTermsPopup').hide()"); + PrimeFaces.current().executeScript("PF('guestbookAndTermsPopup').hide()"); + fileDownloadService.writeGuestbookResponseAndRequestAccess(guestbookResponse); + } + /** * Writes a guestbook entry for either popup scenario: guestbook or terms. */ @@ -307,13 +313,13 @@ public void requestAccessMultiple(List files) { } } if (notificationFile != null && succeeded) { - fileDownloadService.sendRequestFileAccessNotification(notificationFile, (AuthenticatedUser) session.getUser()); + fileDownloadService.sendRequestFileAccessNotification(notificationFile.getOwner(), notificationFile.getId(), (AuthenticatedUser) session.getUser()); } } public void requestAccessIndirect() { //Called when there are multiple files and no popup - // or there's a popup with sigular or multiple files + // or there's a popup with singular or multiple files // The list of files for Request Access is set in the Dataset Page when // user clicks the request access button in the files fragment // (and has selected one or more files) @@ -329,8 +335,16 @@ private boolean processRequestAccess(DataFile file, Boolean sendNotification) { // create notification if necessary if (sendNotification) { - fileDownloadService.sendRequestFileAccessNotification(file, user); - } + fileDownloadService.sendRequestFileAccessNotification(file.getOwner(), file.getId(), (AuthenticatedUser) session.getUser()); + } + //ToDO QDRADA - where to write the response? + /* + //write the guestbookResponse if there is an enabled guestbook + GuestbookResponse gbr = this.getGuestbookResponse(); //can we be sure this is the correct guestbookResponse?? - can it get out of sync?? + if( gbr != null && gbr.getGuestbook().isEnabled() ){ + fileDownloadService.writeGuestbookResponseRecordForRequestAccess(gbr); + } + */ JsfHelper.addSuccessMessage(BundleUtil.getStringFromBundle("file.accessRequested.success")); return true; } diff --git a/src/main/java/edu/harvard/iq/dataverse/FileDownloadServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/FileDownloadServiceBean.java index a90489be29a..f7612300eaf 100644 --- a/src/main/java/edu/harvard/iq/dataverse/FileDownloadServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/FileDownloadServiceBean.java @@ -76,6 +76,8 @@ public class FileDownloadServiceBean implements java.io.Serializable { PrivateUrlServiceBean privateUrlService; @EJB SettingsServiceBean settingsService; + @EJB + MailServiceBean mailService; @Inject DataverseSession session; @@ -192,6 +194,38 @@ public void writeGuestbookAndStartFileDownload(GuestbookResponse guestbookRespon redirectToDownloadAPI(guestbookResponse.getFileFormat(), guestbookResponse.getDataFile().getId()); logger.fine("issued file download redirect for datafile "+guestbookResponse.getDataFile().getId()); } + + public void writeGuestbookResponseAndRequestAccess(GuestbookResponse guestbookResponse){ + if (guestbookResponse == null || ( guestbookResponse.getDataFile() == null && guestbookResponse.getSelectedFileIds() == null) ) { + return; + } + + List selectedDataFiles = new ArrayList<>(); //always make sure it's at least an empty List + + if(guestbookResponse.getDataFile() != null ){ //one file 'selected' by 'Request Access' button click + selectedDataFiles.add(datafileService.find(guestbookResponse.getDataFile().getId())); //don't want the findCheapAndEasy + } + + if(guestbookResponse.getSelectedFileIds() != null && !guestbookResponse.getSelectedFileIds().isEmpty()) { //multiple selected through 
multi-select REquest Access button + selectedDataFiles = datafileService.findAll(guestbookResponse.getSelectedFileIds()); + } + + int countRequestAccessSuccess = 0; + + for(DataFile dataFile : selectedDataFiles){ + guestbookResponse.setDataFile(dataFile); + writeGuestbookResponseRecordForRequestAccess(guestbookResponse); + if(requestAccess(dataFile,guestbookResponse)){ + countRequestAccessSuccess++; + } + } + + if(countRequestAccessSuccess > 0){ + DataFile firstDataFile = selectedDataFiles.get(0); + sendRequestFileAccessNotification(firstDataFile.getOwner(), firstDataFile.getId(), (AuthenticatedUser) session.getUser()); + } + + } public void writeGuestbookResponseRecord(GuestbookResponse guestbookResponse, FileMetadata fileMetadata, String format) { if(!fileMetadata.getDatasetVersion().isDraft()){ @@ -221,6 +255,18 @@ public void writeGuestbookResponseRecord(GuestbookResponse guestbookResponse) { } } + public void writeGuestbookResponseRecordForRequestAccess(GuestbookResponse guestbookResponse) { + try { + CreateGuestbookResponseCommand cmd = new CreateGuestbookResponseCommand(dvRequestService.getDataverseRequest(), guestbookResponse, guestbookResponse.getDataset()); + commandEngine.submit(cmd); + + } catch (CommandException e) { + //if an error occurs here then download won't happen no need for response recs... + logger.info("Failed to writeGuestbookResponseRecord for RequestAccess"); + } + + } + // The "guestBookRecord(s)AlreadyWritten" parameter in the 2 methods // below (redirectToBatchDownloadAPI() and redirectToDownloadAPI(), for the // multiple- and single-file downloads respectively) are passed to the @@ -499,15 +545,63 @@ public boolean requestAccess(Long fileId) { } } return false; - } + } + + public boolean requestAccess(DataFile dataFile, GuestbookResponse gbr){ + boolean accessRequested = false; + if (dvRequestService.getDataverseRequest().getAuthenticatedUser() == null){ + return accessRequested; + } + + List fARs = dataFile.getFileAccessRequesters(); + + if(fARs.isEmpty() || (!fARs.isEmpty() && !fARs.contains((AuthenticatedUser)session.getUser()))){ + try { + commandEngine.submit(new RequestAccessCommand(dvRequestService.getDataverseRequest(), dataFile, gbr)); + accessRequested = true; + } catch (CommandException ex) { + logger.info("Unable to request access for file id " + dataFile.getId() + ". 
Exception: " + ex); + } + } + + return accessRequested; + } - public void sendRequestFileAccessNotification(DataFile datafile, AuthenticatedUser requestor) { - permissionService.getUsersWithPermissionOn(Permission.ManageFilePermissions, datafile).stream().forEach((au) -> { - userNotificationService.sendNotification(au, new Timestamp(new Date().getTime()), UserNotification.Type.REQUESTFILEACCESS, datafile.getId(), null, requestor, false); + public void sendRequestFileAccessNotification(Dataset dataset, Long fileId, AuthenticatedUser requestor) { + permissionService.getUsersWithPermissionOn(Permission.ManageDatasetPermissions, dataset).stream().forEach((au) -> { + userNotificationService.sendNotification(au, new Timestamp(new Date().getTime()), UserNotification.Type.REQUESTFILEACCESS, fileId, null, requestor, false); }); } + public void sendRequestFileAccessNotification(Dataset dataset, Long fileId, GuestbookResponse gb){ + Timestamp ts = new Timestamp(new Date().getTime()); + UserNotification un = null; + + //String appendMsgText = (gb == null)?("") : this.getGuestbookAppendEmailDetails(gb); + String appendMsgText = ""; + + //first send a notification for all the Users that have ManageDatasetPermissions a notification that a user has requested accedd + List mngDsPermUsers = permissionService.getUsersWithPermissionOn(Permission.ManageDatasetPermissions, dataset); + + for (AuthenticatedUser au : mngDsPermUsers){ + un = userNotificationService.sendUserNotification(au, ts, UserNotification.Type.REQUESTFILEACCESS, fileId); + + if(un != null){ + + boolean mailed = mailService.sendNotificationEmail(un, appendMsgText, (AuthenticatedUser)session.getUser(),false); + if(mailed){ + un.setEmailed(true); + userNotificationService.save(un); + } + } + } + + //send the user that requested access a notification that they requested the access + userNotificationService.sendNotification((AuthenticatedUser) session.getUser(), ts, UserNotification.Type.REQUESTEDFILEACCESS, fileId); + } + + public String generateServiceKey() { UUID uid = UUID.randomUUID(); // last 8 bytes, of the random UUID, 16 hex digits: diff --git a/src/main/java/edu/harvard/iq/dataverse/FilePage.java b/src/main/java/edu/harvard/iq/dataverse/FilePage.java index e6d5cc75ca3..5845d65889e 100644 --- a/src/main/java/edu/harvard/iq/dataverse/FilePage.java +++ b/src/main/java/edu/harvard/iq/dataverse/FilePage.java @@ -318,6 +318,12 @@ public boolean isRequestAccessPopupRequired() { return FileUtil.isRequestAccessPopupRequired(fileMetadata.getDatasetVersion()); } + public boolean isGuestbookAndTermsPopupRequired() { + if(fileMetadata.getId() == null || fileMetadata.getDatasetVersion().getId() == null ){ + return false; + } + return FileUtil.isGuestbookAndTermsPopupRequired(fileMetadata.getDatasetVersion()); + } public void setFileMetadata(FileMetadata fileMetadata) { this.fileMetadata = fileMetadata; diff --git a/src/main/java/edu/harvard/iq/dataverse/GuestbookResponse.java b/src/main/java/edu/harvard/iq/dataverse/GuestbookResponse.java index 69404482fce..5c39d1039d9 100644 --- a/src/main/java/edu/harvard/iq/dataverse/GuestbookResponse.java +++ b/src/main/java/edu/harvard/iq/dataverse/GuestbookResponse.java @@ -65,8 +65,12 @@ public class GuestbookResponse implements Serializable { @JoinColumn(nullable=true) private AuthenticatedUser authenticatedUser; - @OneToOne(cascade=CascadeType.ALL,mappedBy="guestbookResponse",fetch = FetchType.LAZY, optional = false) + @OneToOne(cascade={CascadeType.REMOVE, CascadeType.MERGE, 
CascadeType.PERSIST},mappedBy="guestbookResponse",fetch = FetchType.LAZY) private FileDownload fileDownload; + + @OneToMany(mappedBy="guestbookResponse",cascade={CascadeType.REMOVE, CascadeType.MERGE, CascadeType.PERSIST},fetch = FetchType.LAZY) + //private FileAccessRequest fileAccessRequest; + private List fileAccessRequests; @OneToMany(mappedBy="guestbookResponse",cascade={CascadeType.REMOVE, CascadeType.MERGE, CascadeType.PERSIST},orphanRemoval=true) @OrderBy ("id") @@ -253,6 +257,13 @@ public void setFileDownload(FileDownload fDownload){ this.fileDownload = fDownload; } + public List getFileAccessRequests(){ + return fileAccessRequests; + } + + public void setFileAccessRequest(List fARs){ + this.fileAccessRequests = fARs; + } public Dataset getDataset() { return dataset; diff --git a/src/main/java/edu/harvard/iq/dataverse/ManagePermissionsPage.java b/src/main/java/edu/harvard/iq/dataverse/ManagePermissionsPage.java index e71e04bc42f..173af4c241f 100644 --- a/src/main/java/edu/harvard/iq/dataverse/ManagePermissionsPage.java +++ b/src/main/java/edu/harvard/iq/dataverse/ManagePermissionsPage.java @@ -56,6 +56,8 @@ public class ManagePermissionsPage implements java.io.Serializable { @EJB DvObjectServiceBean dvObjectService; @EJB + FileAccessRequestServiceBean fileAccessRequestService; + @EJB DataverseRoleServiceBean roleService; @EJB RoleAssigneeServiceBean roleAssigneeService; @@ -400,6 +402,51 @@ public List completeRoleAssignee( String query ) { return roleAssigneeService.filterRoleAssignees(query, dvObject, roleAssignSelectedRoleAssignees); } + public void grantAccess(ActionEvent evt) { + //QDRADA + logger.info("grantAccess Called"); + try { + throw new Exception("grantAccessCalled"); + } catch (Exception e) { + e.printStackTrace(); + } + /* + // Find the built in file downloader role (currently by alias) + DataverseRole fileDownloaderRole = roleService.findBuiltinRoleByAlias(DataverseRole.FILE_DOWNLOADER); + for (RoleAssignee roleAssignee : selectedRoleAssignees) { + boolean sendNotification = false; + for (DataFile file : selectedFiles) { + if (assignRole(roleAssignee, file, fileDownloaderRole)) { + if (file.isReleased()) { + sendNotification = true; + } + // remove request, if it exist + for (AuthenticatedUser au : roleAssigneeService.getExplicitUsers(roleAssignee)) { + if (file.getFileAccessRequesters().remove(au)) { + List fileAccessRequests = fileAccessRequestService.findAllByAuthenticatedUserIdAndRequestState(au.getId(), FileAccessRequest.RequestState.CREATED); + for(FileAccessRequest far : fileAccessRequests){ + far.setStateGranted(); + fileAccessRequestService.save(far); + } + file.setFileAccessRequests(fileAccessRequests); + datafileService.save(file); + } + } + } + + } + + if (sendNotification) { + for (AuthenticatedUser au : roleAssigneeService.getExplicitUsers(roleAssignee)) { + userNotificationService.sendNotification(au, new Timestamp(new Date().getTime()), UserNotification.Type.GRANTFILEACCESS, dataset.getId()); + } + } + } + + initMaps(); + */ + } + public List getAvailableRoles() { List roles = new LinkedList<>(); if (dvObject != null && dvObject.getId() != null) { diff --git a/src/main/java/edu/harvard/iq/dataverse/UserNotification.java b/src/main/java/edu/harvard/iq/dataverse/UserNotification.java index b68a1b9d13e..c91f7630caa 100644 --- a/src/main/java/edu/harvard/iq/dataverse/UserNotification.java +++ b/src/main/java/edu/harvard/iq/dataverse/UserNotification.java @@ -39,7 +39,7 @@ public enum Type { CHECKSUMIMPORT, CHECKSUMFAIL, CONFIRMEMAIL, APIGENERATED, 
INGESTCOMPLETED, INGESTCOMPLETEDWITHERRORS, PUBLISHFAILED_PIDREG, WORKFLOW_SUCCESS, WORKFLOW_FAILURE, STATUSUPDATED, DATASETCREATED, DATASETMENTIONED, GLOBUSUPLOADCOMPLETED, GLOBUSUPLOADCOMPLETEDWITHERRORS, - GLOBUSDOWNLOADCOMPLETED, GLOBUSDOWNLOADCOMPLETEDWITHERRORS; + GLOBUSDOWNLOADCOMPLETED, GLOBUSDOWNLOADCOMPLETEDWITHERRORS, REQUESTEDFILEACCESS; public String getDescription() { return BundleUtil.getStringFromBundle("notification.typeDescription." + this.name()); diff --git a/src/main/java/edu/harvard/iq/dataverse/UserNotificationServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/UserNotificationServiceBean.java index 947ee3ce989..972f26f6830 100644 --- a/src/main/java/edu/harvard/iq/dataverse/UserNotificationServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/UserNotificationServiceBean.java @@ -131,6 +131,28 @@ public void sendNotification(AuthenticatedUser dataverseUser, Timestamp sendDate save(userNotification); } } + + /** + * Returns a UserNotification that was sent to a dataverseUser. + * Sends ONLY the UserNotification (no email is sent via this method). + * All parameters are assumed to be valid, non-null objects. + * + * @param dataverseUser - the AuthenticatedUser to whom the notification is to be sent + * @param sendDate - the time and date the notification was sent. + * @param type - the type of notification to be sent (see UserNotification for the different types) + * @param objectId - the ID of the Dataverse object (Dataverse, Dataset, etc.) that the notification pertains to + * @return The UserNotification that was sent to the dataverseUser + */ + + public UserNotification sendUserNotification(AuthenticatedUser dataverseUser, Timestamp sendDate, Type type, Long objectId) { + UserNotification userNotification = new UserNotification(); + userNotification.setUser(dataverseUser); + userNotification.setSendDate(sendDate); + userNotification.setType(type); + userNotification.setObjectId(objectId); + this.save(userNotification); + return userNotification; + } public boolean isEmailMuted(UserNotification userNotification) { final Type type = userNotification.getType(); diff --git a/src/main/java/edu/harvard/iq/dataverse/authorization/users/AuthenticatedUser.java b/src/main/java/edu/harvard/iq/dataverse/authorization/users/AuthenticatedUser.java index 9fdfce2f1a7..bb688fb8acb 100644 --- a/src/main/java/edu/harvard/iq/dataverse/authorization/users/AuthenticatedUser.java +++ b/src/main/java/edu/harvard/iq/dataverse/authorization/users/AuthenticatedUser.java @@ -1,7 +1,9 @@ package edu.harvard.iq.dataverse.authorization.users; import edu.harvard.iq.dataverse.Cart; +import edu.harvard.iq.dataverse.DataFile; import edu.harvard.iq.dataverse.DatasetLock; +import edu.harvard.iq.dataverse.FileAccessRequest; import edu.harvard.iq.dataverse.UserNotification.Type; import edu.harvard.iq.dataverse.UserNotification; import edu.harvard.iq.dataverse.validation.ValidateEmail; @@ -17,6 +19,7 @@ import edu.harvard.iq.dataverse.util.json.NullSafeJsonBuilder; import java.io.Serializable; import java.sql.Timestamp; +import java.util.ArrayList; import java.util.Date; import java.util.HashSet; import java.util.List; @@ -28,6 +31,7 @@ import javax.persistence.CascadeType; import javax.persistence.Column; import javax.persistence.Entity; +import javax.persistence.FetchType; import javax.persistence.GeneratedValue; import javax.persistence.GenerationType; import javax.persistence.Id; @@ -202,6 +206,27 @@ public void setDatasetLocks(List datasetLocks) { @OneToMany(mappedBy = "user", 
cascade={CascadeType.REMOVE, CascadeType.MERGE, CascadeType.PERSIST}) private List oAuth2TokenDatas; + /*for many to many fileAccessRequests*/ + @OneToMany(mappedBy = "user", cascade={CascadeType.REMOVE, CascadeType.MERGE, CascadeType.PERSIST, CascadeType.REFRESH}, fetch = FetchType.LAZY) + private List fileAccessRequests; + + public List getFileAccessRequests() { + return fileAccessRequests; + } + + public void setFileAccessRequests(List fARs) { + this.fileAccessRequests = fARs; + } + + public List getRequestedDataFiles(){ + List requestedDataFiles = new ArrayList<>(); + + for(FileAccessRequest far : getFileAccessRequests()){ + requestedDataFiles.add(far.getDataFile()); + } + return requestedDataFiles; + } + @Override public AuthenticatedUserDisplayInfo getDisplayInfo() { return new AuthenticatedUserDisplayInfo(firstName, lastName, email, affiliation, position); diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/RequestAccessCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/RequestAccessCommand.java index d710ed66551..f6a3b287778 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/RequestAccessCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/RequestAccessCommand.java @@ -5,7 +5,11 @@ */ package edu.harvard.iq.dataverse.engine.command.impl; +import java.util.logging.Logger; + import edu.harvard.iq.dataverse.DataFile; +import edu.harvard.iq.dataverse.FileAccessRequest; +import edu.harvard.iq.dataverse.GuestbookResponse; import edu.harvard.iq.dataverse.authorization.Permission; import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser; import edu.harvard.iq.dataverse.engine.command.AbstractCommand; @@ -23,8 +27,11 @@ @RequiredPermissions({}) public class RequestAccessCommand extends AbstractCommand { + private static final Logger logger = Logger.getLogger(RequestAccessCommand.class.getName()); + private final DataFile file; private final AuthenticatedUser requester; + private final FileAccessRequest fileAccessRequest; private final Boolean sendNotification; @@ -33,6 +40,7 @@ public RequestAccessCommand(DataverseRequest dvRequest, DataFile file) { super(dvRequest, file); this.file = file; this.requester = (AuthenticatedUser) dvRequest.getUser(); + this.fileAccessRequest = new FileAccessRequest(file,requester); this.sendNotification = false; } @@ -41,8 +49,27 @@ public RequestAccessCommand(DataverseRequest dvRequest, DataFile file, Boolean s super(dvRequest, file); this.file = file; this.requester = (AuthenticatedUser) dvRequest.getUser(); + this.fileAccessRequest = new FileAccessRequest(file,requester); this.sendNotification = sendNotification; } + + public RequestAccessCommand(DataverseRequest dvRequest, DataFile file, GuestbookResponse gbr) { + // for data file check permission on owning dataset + super(dvRequest, file); + this.file = file; + this.requester = (AuthenticatedUser) dvRequest.getUser(); + this.fileAccessRequest = new FileAccessRequest(file,requester,gbr); + this.sendNotification = false; + } + + public RequestAccessCommand(DataverseRequest dvRequest, DataFile file, GuestbookResponse gbr, Boolean sendNotification) { + // for data file check permission on owning dataset + super(dvRequest, file); + this.file = file; + this.requester = (AuthenticatedUser) dvRequest.getUser(); + this.fileAccessRequest = new FileAccessRequest(file,requester,gbr); + this.sendNotification = sendNotification; + } public RequestAccessCommand(DataverseRequest dvRequest, DataFile file, 
GuestbookResponse gbr) { @@ -75,9 +102,13 @@ public DataFile execute(CommandContext ctxt) throws CommandException { if(FileUtil.isActivelyEmbargoed(file)) { throw new CommandException(BundleUtil.getStringFromBundle("file.requestAccess.notAllowed.embargoed"), this); } + file.getFileAccessRequests().add(fileAccessRequest); file.addFileAccessRequester(requester); + requester.getFileAccessRequests().add(fileAccessRequest); if (sendNotification) { - ctxt.fileDownload().sendRequestFileAccessNotification(this.file, requester); + //QDRADA + logger.info("ctxt.fileDownload().sendRequestFileAccessNotification(this.file, requester);"); + //ctxt.fileDownload().sendRequestFileAccessNotification(this.file, requester); } return ctxt.files().save(file); } diff --git a/src/main/java/edu/harvard/iq/dataverse/util/FileUtil.java b/src/main/java/edu/harvard/iq/dataverse/util/FileUtil.java index 6bb7e1d583b..a5fb98f7c49 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/FileUtil.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/FileUtil.java @@ -28,6 +28,7 @@ import edu.harvard.iq.dataverse.DatasetVersion; import edu.harvard.iq.dataverse.Embargo; import edu.harvard.iq.dataverse.FileMetadata; +import edu.harvard.iq.dataverse.TermsOfUseAndAccess; import edu.harvard.iq.dataverse.dataaccess.DataAccess; import edu.harvard.iq.dataverse.dataaccess.ImageThumbConverter; import edu.harvard.iq.dataverse.dataaccess.S3AccessIO; @@ -1639,6 +1640,71 @@ private static Boolean popupDueToStateOrTerms(DatasetVersion datasetVersion) { return null; } + /** + * isGuestbookAndTermsPopupRequired + * meant to replace both isDownloadPopupRequired() and isRequestAccessDownloadPopupRequired() when the guestbook-terms-popup-fragment.xhtml + * replaced file-download-popup-fragment.xhtml and file-request-access-popup-fragment.xhtml + * @param datasetVersion + * @return boolean + */ + + public static boolean isGuestbookAndTermsPopupRequired(DatasetVersion datasetVersion) { + return isGuestbookPopupRequired(datasetVersion) || isTermsPopupRequired(datasetVersion); + } + + public static boolean isGuestbookPopupRequired(DatasetVersion datasetVersion) { + + if (datasetVersion == null) { + logger.fine("GuestbookPopup not required because datasetVersion is null."); + return false; + } + //0. if version is draft then Popup "not required" + if (!datasetVersion.isReleased()) { + logger.fine("GuestbookPopup not required because datasetVersion has not been released."); + return false; + } + + // 3. Guest Book: + if (datasetVersion.getDataset() != null && datasetVersion.getDataset().getGuestbook() != null && datasetVersion.getDataset().getGuestbook().isEnabled() && datasetVersion.getDataset().getGuestbook().getDataverse() != null) { + logger.fine("GuestbookPopup required because an enabled guestbook exists."); + return true; + } + + logger.fine("GuestbookPopup is not required."); + return false; + } + + public static boolean isTermsPopupRequired(DatasetVersion datasetVersion) { + + if (datasetVersion == null) { + logger.fine("TermsPopup not required because datasetVersion is null."); + return false; + } + //0. if version is draft then Popup "not required" + if (!datasetVersion.isReleased()) { + logger.fine("TermsPopup not required because datasetVersion has not been released."); + return false; + } + // 1. 
License and Terms of Use:
+        if (datasetVersion.getTermsOfUseAndAccess() != null) {
+            if (!License.CC0.equals(datasetVersion.getTermsOfUseAndAccess().getLicense())
+                    && !(datasetVersion.getTermsOfUseAndAccess().getTermsOfUse() == null
+                    || datasetVersion.getTermsOfUseAndAccess().getTermsOfUse().equals(""))) {
+                logger.fine("TermsPopup required because of license or terms of use.");
+                return true;
+            }
+
+            // 2. Terms of Access:
+            if (!(datasetVersion.getTermsOfUseAndAccess().getTermsOfAccess() == null) && !datasetVersion.getTermsOfUseAndAccess().getTermsOfAccess().equals("")) {
+                logger.fine("TermsPopup required because of terms of access.");
+                return true;
+            }
+        }
+
+        logger.fine("TermsPopup is not required.");
+        return false;
+    }
+
     /**
      * Provide download URL if no Terms of Use, no guestbook, and not
      * restricted.
diff --git a/src/main/java/edu/harvard/iq/dataverse/util/MailUtil.java b/src/main/java/edu/harvard/iq/dataverse/util/MailUtil.java
index 72980c3451a..d166cc753cc 100644
--- a/src/main/java/edu/harvard/iq/dataverse/util/MailUtil.java
+++ b/src/main/java/edu/harvard/iq/dataverse/util/MailUtil.java
@@ -51,6 +51,8 @@ public static String getSubjectTextBasedOnNotification(UserNotification userNoti
             return BundleUtil.getStringFromBundle("notification.email.create.dataverse.subject", rootDvNameAsList);
         case REQUESTFILEACCESS:
             return BundleUtil.getStringFromBundle("notification.email.request.file.access.subject", rootDvNameAsList);
+        case REQUESTEDFILEACCESS:
+            return BundleUtil.getStringFromBundle("notification.email.requested.file.access.subject", rootDvNameAsList);
         case GRANTFILEACCESS:
             return BundleUtil.getStringFromBundle("notification.email.grant.file.access.subject", rootDvNameAsList);
         case REJECTFILEACCESS:
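Since the new FileUtil checks above are pure functions of a DatasetVersion, their trigger conditions are easy to illustrate. A minimal sketch, assuming the usual entity setters (this snippet is illustrative and not part of the patch):

```java
import edu.harvard.iq.dataverse.DatasetVersion;
import edu.harvard.iq.dataverse.TermsOfUseAndAccess;
import edu.harvard.iq.dataverse.util.FileUtil;

// Sketch: exercises the two halves of the new combined popup check.
class GuestbookTermsPopupSketch {
    public static void main(String[] args) {
        DatasetVersion released = new DatasetVersion();
        released.setVersionState(DatasetVersion.VersionState.RELEASED);

        // Released version, no guestbook, no terms: neither half fires.
        System.out.println(FileUtil.isGuestbookAndTermsPopupRequired(released)); // false

        // A non-CC0 license with non-empty terms of use trips the terms half.
        TermsOfUseAndAccess terms = new TermsOfUseAndAccess();
        terms.setTermsOfUse("Contact the depositor before reuse.");
        released.setTermsOfUseAndAccess(terms);
        System.out.println(FileUtil.isTermsPopupRequired(released));             // true
        System.out.println(FileUtil.isGuestbookAndTermsPopupRequired(released)); // true
    }
}
```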

diff --git a/src/main/webapp/dataset.xhtml b/src/main/webapp/dataset.xhtml
index 4ba6ad0e7e1..a79e421fa58 100644
--- a/src/main/webapp/dataset.xhtml
+++ b/src/main/webapp/dataset.xhtml

[The hunks for dataset.xhtml (@@ -1048, -1494, -1530, -1883), file-download-button-fragment.xhtml (index 597d9a12786..4b075eb3377) and file.xhtml (index ae8729fdf89..d27536cb892, hunks @@ -196, -343, -356) arrived with their XML tags stripped; only attribute fragments survive. Those fragments show a mechanical swap of the old download and request-access popups for the new combined one: rendered conditions change from DatasetPage.downloadPopupRequired to DatasetPage.guestbookAndTermsPopupRequired (e.g. on the #{bundle['dataset.downloadUnrestricted']} link), update targets change to @widgetVar(guestbookAndTermsPopup), oncomplete handlers become PF('guestbookAndTermsPopup').show() plus handleResizeDialog('guestbookAndTermsPopup'), the now-redundant popup markup is removed from dataset.xhtml (@@ -1530,19 +1530,11), and the JavaScript fallback for the 'GuestbookRequired' outcome changes from PF('downloadPopup').show() to PF('guestbookAndTermsPopup').show(). The same swap is applied to every download variant in file-download-button-fragment.xhtml (whole file, all formats, tab-delimited, RData, variable metadata, Globus, external tools) and to the download links in file.xhtml.]

From bc42df0946371c1af41560b79b5324ed5b565b99 Mon Sep 17 00:00:00 2001
From: qqmyers
Date: Fri, 19 May 2023 13:12:26 -0400
Subject: [PATCH 0193/1525] typo from merge

---
 src/main/java/edu/harvard/iq/dataverse/DatasetPage.java | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java b/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java
index c0b4fc104ad..a3160a6e48b 100644
--- a/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java
+++ b/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java
@@ -3127,7 +3127,7 @@ public void startDownloadSelectedOriginal() {
 
     private void startDownload(boolean downloadOriginal){
         boolean guestbookRequired = isDownloadPopupRequired();
-        boolean validate = validateFilesForDownload(guestbookRequired);
+        boolean validate = validateFilesForDownload(downloadOriginal);
         if (validate) {
             updateGuestbookResponse(guestbookRequired, downloadOriginal);
             if(!guestbookRequired && !getValidateFilesOutcome().equals("Mixed")){

From 57e984b0e468a55f578b3b21e4787c1a63e4dca9 Mon Sep 17 00:00:00 2001
From: qqmyers
Date: Fri, 19 May 2023 13:27:52 -0400
Subject: [PATCH 0194/1525] fix for #9601

---
 .../edu/harvard/iq/dataverse/ingest/IngestServiceBean.java | 7 ++++---
 1 file changed, 4 insertions(+), 3 deletions(-)

diff --git a/src/main/java/edu/harvard/iq/dataverse/ingest/IngestServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/ingest/IngestServiceBean.java
index 7cdfda8d082..5a353453fe8 100644
--- a/src/main/java/edu/harvard/iq/dataverse/ingest/IngestServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/ingest/IngestServiceBean.java
@@ -332,9 +332,7 @@ public List<DataFile> saveAndAddFilesToDataset(DatasetVersion version,
             } catch (IOException e) {
                 logger.warning("Error getting ingest limit for file: " + dataFile.getIdentifier() + " : " + e.getMessage());
             }
-            if (unattached) {
-                dataFile.setOwner(null);
-            }
+
             if (savedSuccess && belowLimit) {
                 // These are all brand new files, so they should all have
                 // one filemetadata total. -- L.A.
@@ -388,6 +386,9 @@ public List<DataFile> saveAndAddFilesToDataset(DatasetVersion version,
                     dataFile.setContentType(FileUtil.MIME_TYPE_TSV);
                 }
             }
+            if (unattached) {
+                dataFile.setOwner(null);
+            }
             // ...
and let's delete the main temp file if it exists: if(tempLocationPath!=null) { try { From 0c76f7b02b4214efa37b5ac7a5d23f308afae5d5 Mon Sep 17 00:00:00 2001 From: qqmyers Date: Fri, 19 May 2023 15:53:09 -0400 Subject: [PATCH 0195/1525] remove QDR updates --- src/test/java/edu/harvard/iq/dataverse/util/FileUtilTest.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/test/java/edu/harvard/iq/dataverse/util/FileUtilTest.java b/src/test/java/edu/harvard/iq/dataverse/util/FileUtilTest.java index 34176f7fb26..1d481f18cf5 100644 --- a/src/test/java/edu/harvard/iq/dataverse/util/FileUtilTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/util/FileUtilTest.java @@ -138,7 +138,7 @@ public void testIsDownloadPopupRequiredLicenseCC0() { DatasetVersion dsv1 = new DatasetVersion(); dsv1.setVersionState(DatasetVersion.VersionState.RELEASED); TermsOfUseAndAccess termsOfUseAndAccess = new TermsOfUseAndAccess(); - License license = new License("CC0 1.0", "Creative Commons CC0 1.0 Universal Public Domain Dedication.", URI.create("https://creativecommons.org/publicdomain/zero/1.0"), URI.create("https://licensebuttons.net/p/zero/1.0/88x31.png"), true,1L); + License license = new License("CC0", "You can copy, modify, distribute and perform the work, even for commercial purposes, all without asking permission.", URI.create("http://creativecommons.org/publicdomain/zero/1.0"), URI.create("/resources/images/cc0.png"), true, 1l); license.setDefault(true); termsOfUseAndAccess.setLicense(license); dsv1.setTermsOfUseAndAccess(termsOfUseAndAccess); From d89f3017031aaef0991edc7bf92f8172682b1ddf Mon Sep 17 00:00:00 2001 From: GPortas Date: Fri, 19 May 2023 21:43:29 +0100 Subject: [PATCH 0196/1525] Added: getAnonymizedDraftVersion datasets endpoint --- .../harvard/iq/dataverse/api/Datasets.java | 9 ++++-- .../iq/dataverse/dataset/DatasetUtil.java | 16 ++++++++++ .../iq/dataverse/dataset/DatasetUtilTest.java | 31 +++++++++++++++++++ 3 files changed, 53 insertions(+), 3 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java index bb47333ed75..fe3fa13b8d7 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java @@ -3868,15 +3868,18 @@ public Response getDatasetSummaryFieldNames() { @GET @AuthRequired - @Path("anonymizedDraftVersion/{privateUrlToken}") + @Path("anonymizedDraftVersions/{privateUrlToken}") public Response getAnonymizedDraftVersion(@Context ContainerRequestContext crc, @PathParam("privateUrlToken") String privateUrlToken, @QueryParam("anonymizedFieldValue") String anonymizedFieldValue) { - // TODO: replace fields specified in AnonymizedFieldTypeNames setting with anonymizedFieldValue + String anonymizedFieldTypeNames = settingsSvc.getValueForKey(SettingsServiceBean.Key.AnonymizedFieldTypeNames); + if (anonymizedFieldTypeNames == null) { + throw new NotAcceptableException("Anonymized Access not enabled"); + } return response(req -> { DatasetVersion dsv = privateUrlService.getDraftDatasetVersionFromToken(privateUrlToken); return (dsv == null || dsv.getId() == null) ? 
notFound("Dataset version not found") - : ok(json(dsv)); + : ok(json(DatasetUtil.anonymizeDatasetVersion(dsv, anonymizedFieldTypeNames, anonymizedFieldValue))); }, getRequestUser(crc)); } } diff --git a/src/main/java/edu/harvard/iq/dataverse/dataset/DatasetUtil.java b/src/main/java/edu/harvard/iq/dataverse/dataset/DatasetUtil.java index a75775810d9..9b51a194733 100644 --- a/src/main/java/edu/harvard/iq/dataverse/dataset/DatasetUtil.java +++ b/src/main/java/edu/harvard/iq/dataverse/dataset/DatasetUtil.java @@ -3,6 +3,7 @@ import edu.harvard.iq.dataverse.DataFile; import edu.harvard.iq.dataverse.Dataset; import edu.harvard.iq.dataverse.DatasetField; +import edu.harvard.iq.dataverse.DatasetFieldValue; import edu.harvard.iq.dataverse.DatasetVersion; import edu.harvard.iq.dataverse.FileMetadata; import edu.harvard.iq.dataverse.TermsOfUseAndAccess; @@ -620,4 +621,19 @@ public static String getLocaleExternalStatus(String status) { } return localizedName; } + + public static DatasetVersion anonymizeDatasetVersion(DatasetVersion datasetVersion, String anonymizedFieldTypeNames, String anonymizedFieldValue) { + List anonymizedFieldTypeNamesList = new ArrayList<>(Arrays.asList(anonymizedFieldTypeNames.split(",\\s"))); + List datasetFields = datasetVersion.getDatasetFields(); + for (DatasetField datasetField : datasetFields) { + if (anonymizedFieldTypeNamesList.contains(datasetField.getDatasetFieldType().getName())) { + List datasetFieldValues = datasetField.getDatasetFieldValues(); + for (DatasetFieldValue datasetFieldValue : datasetFieldValues) { + datasetFieldValue.setValue((anonymizedFieldValue == null) ? BundleUtil.getStringFromBundle("dataset.anonymized.withheld") : anonymizedFieldValue); + } + datasetField.setDatasetFieldValues(datasetFieldValues); + } + } + return datasetVersion; + } } diff --git a/src/test/java/edu/harvard/iq/dataverse/dataset/DatasetUtilTest.java b/src/test/java/edu/harvard/iq/dataverse/dataset/DatasetUtilTest.java index 46bce999c60..5e59e044095 100644 --- a/src/test/java/edu/harvard/iq/dataverse/dataset/DatasetUtilTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/dataset/DatasetUtilTest.java @@ -4,6 +4,7 @@ import edu.harvard.iq.dataverse.Dataset; import edu.harvard.iq.dataverse.DatasetField; import edu.harvard.iq.dataverse.DatasetFieldType; +import edu.harvard.iq.dataverse.DatasetFieldValue; import edu.harvard.iq.dataverse.DatasetVersion; import edu.harvard.iq.dataverse.FileMetadata; import edu.harvard.iq.dataverse.DatasetFieldType.FieldType; @@ -176,4 +177,34 @@ public void testGetDatasetSummaryFieldNames_notEmptyCustomFields() { assertArrayEquals(expected, actual); } + + @Test + public void testAnonymizeDatasetVersion() { + DatasetVersion testDatasetVersion = new DatasetVersion(); + + List testDatasetFields = new ArrayList<>(); + String[] fieldNames = {"author", "subject", "keyword"}; + for (String fieldName : fieldNames) { + DatasetField datasetField = DatasetField.createNewEmptyDatasetField(new DatasetFieldType(fieldName, FieldType.TEXT, false), testDatasetVersion); + DatasetFieldValue datasetFieldValue = new DatasetFieldValue(datasetField, "testValue"); + datasetField.setDatasetFieldValues(List.of(datasetFieldValue)); + testDatasetFields.add(datasetField); + } + testDatasetVersion.setDatasetFields(testDatasetFields); + + String testAnonymizedFieldNames = "subject, keyword"; + String testAnonymizedFieldValue = "testValueToAnonymize"; + DatasetVersion actualVersion = DatasetUtil.anonymizeDatasetVersion(testDatasetVersion, testAnonymizedFieldNames, 
testAnonymizedFieldValue); + + // We check that the fields to be anonymized are successfully anonymized and others remain as originally + List actualVersionDatasetFields = actualVersion.getDatasetFields(); + for (DatasetField datasetField : actualVersionDatasetFields) { + String datasetFieldValue = datasetField.getDatasetFieldValues().get(0).getValue(); + if (testAnonymizedFieldNames.contains(datasetField.getDatasetFieldType().getName())) { + assertEquals(testAnonymizedFieldValue, datasetFieldValue); + } else { + assertNotEquals(testAnonymizedFieldValue, datasetFieldValue); + } + } + } } From 45d9042a226cc53cb8f8a09902a849889786cc0b Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Fri, 19 May 2023 23:42:32 +0200 Subject: [PATCH 0197/1525] style(api): fix typos and style issues in BearerTokenAuthMechanism --- .../api/auth/BearerTokenAuthMechanism.java | 13 ++++++++----- 1 file changed, 8 insertions(+), 5 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/api/auth/BearerTokenAuthMechanism.java b/src/main/java/edu/harvard/iq/dataverse/api/auth/BearerTokenAuthMechanism.java index c4b03728179..856670523b1 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/auth/BearerTokenAuthMechanism.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/auth/BearerTokenAuthMechanism.java @@ -23,6 +23,8 @@ public class BearerTokenAuthMechanism implements AuthMechanism { private static final String BEARER_AUTH_SCHEME = "Bearer"; + private static final Logger logger = Logger.getLogger(BearerTokenAuthMechanism.class.getCanonicalName()); + public static final String UNAUTHORIZED_BEARER_TOKEN = "Unauthorized bearer token"; public static final String INVALID_BEARER_TOKEN = "Could not parse bearer token"; public static final String BEARER_TOKEN_DETECTED_NO_OIDC_PROVIDER_CONFIGURED = "Bearer token detected, no OIDC provider configured"; @@ -31,18 +33,19 @@ public class BearerTokenAuthMechanism implements AuthMechanism { protected AuthenticationServiceBean authSvc; @Inject protected UserServiceBean userSvc; - private static final Logger logger = Logger.getLogger(BearerTokenAuthMechanism.class.getCanonicalName()); + @Override public User findUserFromRequest(ContainerRequestContext containerRequestContext) throws WrappedAuthErrorResponse { if (FeatureFlags.API_BEARER_AUTH.enabled()) { Optional bearerToken = getRequestApiKey(containerRequestContext); // No Bearer Token present, hence no user can be authenticated - if (!bearerToken.isPresent()) { + if (bearerToken.isEmpty()) { return null; } + // Validate and verify provided Bearer Token, and retrieve UserRecordIdentifier // TODO: Get the identifier from an invalidating cache to avoid lookup bursts of the same token. Tokens in the cache should be removed after some (configurable) time. 
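The TODO above describes the intended follow-up. One possible shape for such an invalidating cache, purely as an illustration (this class is invented for this note and is not part of the patch):

```java
import edu.harvard.iq.dataverse.authorization.UserRecordIdentifier;

import java.time.Duration;
import java.time.Instant;
import java.util.Optional;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;

// Hypothetical time-invalidating cache: bursts of requests carrying the same
// bearer token would hit the map instead of the provider's user info endpoint.
class TokenIdentifierCache {

    private static class Entry {
        final UserRecordIdentifier id;
        final Instant expiresAt;
        Entry(UserRecordIdentifier id, Instant expiresAt) {
            this.id = id;
            this.expiresAt = expiresAt;
        }
    }

    private final ConcurrentMap<String, Entry> cache = new ConcurrentHashMap<>();
    private final Duration ttl; // configurable time-to-live, per the TODO

    TokenIdentifierCache(Duration ttl) {
        this.ttl = ttl;
    }

    Optional<UserRecordIdentifier> get(String rawToken) {
        Entry entry = cache.get(rawToken);
        if (entry == null) {
            return Optional.empty();
        }
        if (Instant.now().isAfter(entry.expiresAt)) {
            cache.remove(rawToken); // expired: force a fresh verification
            return Optional.empty();
        }
        return Optional.of(entry.id);
    }

    void put(String rawToken, UserRecordIdentifier id) {
        cache.put(rawToken, new Entry(id, Instant.now().plus(ttl)));
    }
}
```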
- UserRecordIdentifier userInfo = verifyOidcBearerTokenAndGetUserIndentifier(bearerToken.get()); + UserRecordIdentifier userInfo = verifyOidcBearerTokenAndGetUserIdentifier(bearerToken.get()); // retrieve Authenticated User from AuthService AuthenticatedUser authUser = authSvc.lookupUser(userInfo); @@ -67,7 +70,7 @@ public User findUserFromRequest(ContainerRequestContext containerRequestContext) * @param token The string containing the encoded JWT * @return */ - private UserRecordIdentifier verifyOidcBearerTokenAndGetUserIndentifier(String token) throws WrappedAuthErrorResponse { + private UserRecordIdentifier verifyOidcBearerTokenAndGetUserIdentifier(String token) throws WrappedAuthErrorResponse { try { BearerAccessToken accessToken = BearerAccessToken.parse(token); // Get list of all authentication providers using Open ID Connect @@ -108,7 +111,7 @@ private UserRecordIdentifier verifyOidcBearerTokenAndGetUserIndentifier(String t * Retrieve the raw, encoded token value from the Authorization Bearer HTTP header as defined in RFC 6750 * @return An {@link Optional} either empty if not present or the raw token from the header */ - private Optional getRequestApiKey(ContainerRequestContext containerRequestContext) { + private Optional getRequestApiKey(ContainerRequestContext containerRequestContext) { String headerParamApiKey = containerRequestContext.getHeaderString(HttpHeaders.AUTHORIZATION); if (headerParamApiKey != null && headerParamApiKey.toLowerCase().startsWith(BEARER_AUTH_SCHEME.toLowerCase() + " ")) { return Optional.of(headerParamApiKey); From bb49ea52f482b1b6466e124ca926453670699b09 Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Fri, 19 May 2023 23:47:31 +0200 Subject: [PATCH 0198/1525] refactor(api,auth): improve OIDCAuthProvider.getUserIdentifier - Reuse more existing code paths to avoid duplication - Make actual use of returning an empty optional - Remove no longer checked exception OAuth2Exception - Improve Javadocs of method - Don't just retrieve and bail out on fail but provide smaller analysis steps with logs - Rename method to be more concise in name selection - Change BearerTokenAuthMechanism accordingly --- .../api/auth/BearerTokenAuthMechanism.java | 5 +- .../oauth2/oidc/OIDCAuthProvider.java | 47 +++++++++++++++---- 2 files changed, 39 insertions(+), 13 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/api/auth/BearerTokenAuthMechanism.java b/src/main/java/edu/harvard/iq/dataverse/api/auth/BearerTokenAuthMechanism.java index 856670523b1..eeabcba9f06 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/auth/BearerTokenAuthMechanism.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/auth/BearerTokenAuthMechanism.java @@ -5,7 +5,6 @@ import edu.harvard.iq.dataverse.UserServiceBean; import edu.harvard.iq.dataverse.authorization.AuthenticationServiceBean; import edu.harvard.iq.dataverse.authorization.UserRecordIdentifier; -import edu.harvard.iq.dataverse.authorization.providers.oauth2.OAuth2Exception; import edu.harvard.iq.dataverse.authorization.providers.oauth2.oidc.OIDCAuthProvider; import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser; import edu.harvard.iq.dataverse.authorization.users.User; @@ -88,12 +87,12 @@ private UserRecordIdentifier verifyOidcBearerTokenAndGetUserIdentifier(String to for (OIDCAuthProvider provider : providers) { try { // The OIDCAuthProvider need to verify a Bearer Token and equip the client means to identify the corresponding AuthenticatedUser. 
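Condensed into code, the mechanism's happy path after this refactor looks roughly like the sketch below (not verbatim code; the wrapper class and method are invented to show the flow, and error handling is elided):

```java
import com.nimbusds.oauth2.sdk.ParseException;
import com.nimbusds.oauth2.sdk.token.BearerAccessToken;
import edu.harvard.iq.dataverse.authorization.AuthenticationServiceBean;
import edu.harvard.iq.dataverse.authorization.UserRecordIdentifier;
import edu.harvard.iq.dataverse.authorization.providers.oauth2.oidc.OIDCAuthProvider;
import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser;

import java.io.IOException;
import java.util.List;
import java.util.Optional;

// Sketch of BearerTokenAuthMechanism's happy path after this refactor.
class BearerAuthFlowSketch {
    static AuthenticatedUser authenticate(String headerValue, List<OIDCAuthProvider> providers,
                                          AuthenticationServiceBean authSvc)
            throws ParseException, IOException {
        BearerAccessToken accessToken = BearerAccessToken.parse(headerValue); // "Bearer <token>"
        for (OIDCAuthProvider provider : providers) {
            Optional<UserRecordIdentifier> id = provider.getUserIdentifier(accessToken);
            if (id.isPresent()) {
                // May be null: a valid token whose holder has no local Dataverse account yet.
                return authSvc.lookupUser(id.get());
            }
        }
        return null; // no provider accepted the token -> the mechanism rejects the request
    }
}
```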
- Optional userInfo = provider.getUserIdentifierForValidToken(accessToken); + Optional userInfo = provider.getUserIdentifier(accessToken); if(userInfo.isPresent()) { logger.log(Level.FINE, "Bearer token detected, provider {0} confirmed validity and provided identifier", provider.getId()); return userInfo.get(); } - } catch ( IOException| OAuth2Exception e) { + } catch (IOException e) { logger.log(Level.FINE, "Bearer token detected, provider " + provider.getId() + " indicates an invalid Token, skipping", e); } } diff --git a/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/oidc/OIDCAuthProvider.java b/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/oidc/OIDCAuthProvider.java index 4cf2eeb626a..52362f7abeb 100644 --- a/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/oidc/OIDCAuthProvider.java +++ b/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/oidc/OIDCAuthProvider.java @@ -46,6 +46,7 @@ import java.util.Optional; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ExecutionException; +import java.util.logging.Level; import java.util.logging.Logger; /** @@ -305,16 +306,42 @@ Optional getUserInfo(BearerAccessToken accessToken) throws IOException } /** - * Returns the UserRecordIdentifier corresponding to the given accessToken if valid. - * UserRecordIdentifier (same used as in OAuth2UserRecord), i.e. can be used to find a local UserAccount. - * @param accessToken - * @return Returns the UserRecordIdentifier corresponding to the given accessToken if valid. - * @throws IOException - * @throws OAuth2Exception + * Trades an access token for an {@link UserRecordIdentifier} (if valid). + * + * @apiNote The resulting {@link UserRecordIdentifier} may be used with + * {@link edu.harvard.iq.dataverse.authorization.AuthenticationServiceBean#lookupUser(UserRecordIdentifier)} + * to look up an {@link edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser} from the database. + * @see edu.harvard.iq.dataverse.api.auth.BearerTokenAuthMechanism + * + * @param accessToken The token to use when requesting user information from the provider + * @return Returns an {@link UserRecordIdentifier} for a valid access token or an empty {@link Optional}. + * @throws IOException In case communication with the endpoint fails to succeed for an I/O reason */ - public Optional getUserIdentifierForValidToken(BearerAccessToken accessToken) throws IOException, OAuth2Exception{ - // Request the UserInfoEndpoint to obtain UserInfo, since this endpoint also validate the Token we can reuse the existing code path. - // As an alternative we could use the Introspect Endpoint or assume the Token as some encoded information (i.e. JWT). - return Optional.of(new UserRecordIdentifier( this.getId(), getUserInfo(accessToken).get().getSubject().getValue())); + public Optional getUserIdentifier(BearerAccessToken accessToken) throws IOException { + OAuth2UserRecord userRecord; + try { + // Try to retrieve with given token (throws if invalid token) + Optional userInfo = getUserInfo(accessToken); + + if (userInfo.isPresent()) { + // Take this detour to avoid code duplication and potentially hard to track conversion errors. + userRecord = getUserRecord(userInfo.get()); + } else { + // This should not happen - an error at the provider side will lead to an exception. 
+ logger.log(Level.WARNING, + "User info retrieval from {0} returned empty optional but expected exception for token {1}.", + List.of(getId(), accessToken).toArray() + ); + return Optional.empty(); + } + } catch (OAuth2Exception e) { + logger.log(Level.FINE, + "Could not retrieve user info with token {0} at provider {1}: {2}", + List.of(accessToken, getId(), e.getMessage()).toArray()); + logger.log(Level.FINER, "Retrieval failed, details as follows: ", e); + return Optional.empty(); + } + + return Optional.of(userRecord.getUserRecordIdentifier()); } } From cecb034ad7e2be7c47496e0197a7c591e2be503e Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Sat, 20 May 2023 00:01:38 +0200 Subject: [PATCH 0199/1525] refactor(api): add TODO in BearerTokenAuthMechanism not to neglect IO exceptions --- .../iq/dataverse/api/auth/BearerTokenAuthMechanism.java | 3 +++ 1 file changed, 3 insertions(+) diff --git a/src/main/java/edu/harvard/iq/dataverse/api/auth/BearerTokenAuthMechanism.java b/src/main/java/edu/harvard/iq/dataverse/api/auth/BearerTokenAuthMechanism.java index eeabcba9f06..e26717e97b1 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/auth/BearerTokenAuthMechanism.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/auth/BearerTokenAuthMechanism.java @@ -93,6 +93,9 @@ private UserRecordIdentifier verifyOidcBearerTokenAndGetUserIdentifier(String to return userInfo.get(); } } catch (IOException e) { + // TODO: Just logging this is not sufficient - if there is an IO error with the one provider + // which would have validated successfully, this is not the users fault. We need to + // take note and refer to that later when occurred. logger.log(Level.FINE, "Bearer token detected, provider " + provider.getId() + " indicates an invalid Token, skipping", e); } } From 647315cb52fbdbb45fb33492bdcbd9c829c73a16 Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Sun, 21 May 2023 22:49:47 +0200 Subject: [PATCH 0200/1525] feat(test): extend JUnit5 test helper JvmSettings - Adding a @JvmSetting to a testclass now applies it before all test methods (will be reset if necessary after all tests ran) - Enable deleting existing settings (which allows to override a class wide setting for example) by setting value to "null" - Introduce settings broker with a first simple local implementation as extension point to set these settings out somewhere (TM). - Broker requires annotation which one to use within a class at class level --- .../util/testing/JvmSettingBroker.java | 43 +++++ .../util/testing/JvmSettingExtension.java | 164 ++++++++++++------ .../util/testing/LocalJvmSettings.java | 39 +++++ 3 files changed, 191 insertions(+), 55 deletions(-) create mode 100644 src/test/java/edu/harvard/iq/dataverse/util/testing/JvmSettingBroker.java create mode 100644 src/test/java/edu/harvard/iq/dataverse/util/testing/LocalJvmSettings.java diff --git a/src/test/java/edu/harvard/iq/dataverse/util/testing/JvmSettingBroker.java b/src/test/java/edu/harvard/iq/dataverse/util/testing/JvmSettingBroker.java new file mode 100644 index 00000000000..1235df89b3e --- /dev/null +++ b/src/test/java/edu/harvard/iq/dataverse/util/testing/JvmSettingBroker.java @@ -0,0 +1,43 @@ +package edu.harvard.iq.dataverse.util.testing; + +import java.io.IOException; + +/** + * Provide an interface to access and manipulate {@link edu.harvard.iq.dataverse.settings.JvmSettings} + * at some place (local, remote, different ways to access, etc.). 
+ * Part of the {@link JvmSettingExtension} extension to allow JUnit5 tests to manipulate these + * settings, enabling to test different code paths and so on. + * @implNote Keep in mind to use methods that do not require restarts or similar to set or delete a setting. + * This must be changeable on the fly, otherwise it will be useless for testing. + * Yes, non-hot-reloadable settings may be a problem. The code should be refactored in these cases. + */ +public interface JvmSettingBroker { + + /** + * Receive the value of a {@link edu.harvard.iq.dataverse.settings.JvmSettings} given as its {@link String} + * representation. The reason for this is that we may have inserted variable names already. + * @param key The JVM setting to receive as key, e.g. "dataverse.fqdn". + * @return The value of the setting if present or null. + * @throws IOException When communication goes sideways. + */ + String getJvmSetting(String key) throws IOException; + + /** + * Set the value of a {@link edu.harvard.iq.dataverse.settings.JvmSettings} (given as its {@link String} + * representation). The reason for this is that we may have inserted variable names already. + * @param key The JVM setting to receive as key, e.g. "dataverse.fqdn". + * @param value The JVM setting's value we want to have it set to. + * @throws IOException When communication goes sideways. + */ + void setJvmSetting(String key, String value) throws IOException; + + /** + * Remove the value of a {@link edu.harvard.iq.dataverse.settings.JvmSettings} (given as its {@link String} + * representation). For some tests, one might want to clear a certain setting again and potentially have it set + * back afterward. The reason for this is that we may have inserted variable names already. + * @param key The JVM setting to receive as key, e.g. "dataverse.fqdn". + * @throws IOException When communication goes sideways. 
+ */ + String deleteJvmSetting(String key) throws IOException; + +} \ No newline at end of file diff --git a/src/test/java/edu/harvard/iq/dataverse/util/testing/JvmSettingExtension.java b/src/test/java/edu/harvard/iq/dataverse/util/testing/JvmSettingExtension.java index 17728e75ffc..2065d7b3ae6 100644 --- a/src/test/java/edu/harvard/iq/dataverse/util/testing/JvmSettingExtension.java +++ b/src/test/java/edu/harvard/iq/dataverse/util/testing/JvmSettingExtension.java @@ -1,81 +1,124 @@ package edu.harvard.iq.dataverse.util.testing; import edu.harvard.iq.dataverse.settings.JvmSettings; +import org.junit.jupiter.api.extension.AfterAllCallback; import org.junit.jupiter.api.extension.AfterTestExecutionCallback; +import org.junit.jupiter.api.extension.BeforeAllCallback; import org.junit.jupiter.api.extension.BeforeTestExecutionCallback; import org.junit.jupiter.api.extension.ExtensionContext; +import org.junit.platform.commons.support.AnnotationSupport; +import org.junit.platform.commons.support.ReflectionSupport; -import java.lang.reflect.InvocationTargetException; import java.lang.reflect.Method; +import java.lang.reflect.Modifier; +import java.util.List; +import java.util.Optional; import static edu.harvard.iq.dataverse.util.testing.JvmSetting.PLACEHOLDER; -public class JvmSettingExtension implements BeforeTestExecutionCallback, AfterTestExecutionCallback { +public class JvmSettingExtension implements BeforeTestExecutionCallback, AfterTestExecutionCallback, BeforeAllCallback, AfterAllCallback { - private ExtensionContext.Store getStore(ExtensionContext context) { - return context.getStore(ExtensionContext.Namespace.create(getClass(), context.getRequiredTestClass(), context.getRequiredTestMethod())); + @Override + public void beforeAll(ExtensionContext extensionContext) throws Exception { + List settings = AnnotationSupport.findRepeatableAnnotations(extensionContext.getTestClass(), JvmSetting.class); + ExtensionContext.Store store = extensionContext.getStore( + ExtensionContext.Namespace.create(getClass(), extensionContext.getRequiredTestClass())); + + setSetting(extensionContext.getRequiredTestClass(), settings, getBroker(extensionContext), store); + } + + @Override + public void afterAll(ExtensionContext extensionContext) throws Exception { + List settings = AnnotationSupport.findRepeatableAnnotations(extensionContext.getTestClass(), JvmSetting.class); + ExtensionContext.Store store = extensionContext.getStore( + ExtensionContext.Namespace.create(getClass(), extensionContext.getRequiredTestClass())); + + resetSetting(settings, getBroker(extensionContext), store); } @Override public void beforeTestExecution(ExtensionContext extensionContext) throws Exception { - extensionContext.getTestMethod().ifPresent(method -> { - JvmSetting[] settings = method.getAnnotationsByType(JvmSetting.class); - for (JvmSetting setting : settings) { - // get the setting name (might need var args substitution) - String settingName = getSettingName(setting); - - // get the setting ... 
- String oldSetting = System.getProperty(settingName); + List settings = AnnotationSupport.findRepeatableAnnotations(extensionContext.getTestMethod(), JvmSetting.class); + ExtensionContext.Store store = extensionContext.getStore( + ExtensionContext.Namespace.create( + getClass(), + extensionContext.getRequiredTestClass(), + extensionContext.getRequiredTestMethod() + )); + + setSetting(extensionContext.getRequiredTestClass(), settings, getBroker(extensionContext), store); + } - // if present - store in context to restore later - if (oldSetting != null) { - getStore(extensionContext).put(settingName, oldSetting); - } - - // set to new value - if (setting.value().equals(PLACEHOLDER) && setting.method().equals(PLACEHOLDER)) { - throw new IllegalArgumentException("You must either provide a value or a method reference " + - "for key JvmSettings." + setting.key()); - } - - // retrieve value from static test class method if no setting given - if (setting.value().equals(PLACEHOLDER)) { - extensionContext.getTestClass().ifPresent(klass -> { - try { - Method valueMethod = klass.getDeclaredMethod(setting.method()); - valueMethod.setAccessible(true); - System.setProperty(settingName, (String)valueMethod.invoke(null)); - } catch (IllegalAccessException | InvocationTargetException | NoSuchMethodException e) { - throw new RuntimeException(e); - } - }); - } else { - System.setProperty(settingName, setting.value()); + @Override + public void afterTestExecution(ExtensionContext extensionContext) throws Exception { + List settings = AnnotationSupport.findRepeatableAnnotations(extensionContext.getTestMethod(), JvmSetting.class); + ExtensionContext.Store store = extensionContext.getStore( + ExtensionContext.Namespace.create( + getClass(), + extensionContext.getRequiredTestClass(), + extensionContext.getRequiredTestMethod() + )); + + resetSetting(settings, getBroker(extensionContext), store); + } + + private void setSetting(Class testClass, List settings, JvmSettingBroker broker, ExtensionContext.Store store) throws Exception { + for (JvmSetting setting : settings) { + // get the setting name (might need var args substitution) + String settingName = getSettingName(setting); + + // get the setting value ... + String oldSetting = broker.getJvmSetting(settingName); + + // if present - store in context to restore later + if (oldSetting != null) { + store.put(settingName, oldSetting); + } + + // set to new value + if (setting.value().equals(PLACEHOLDER) && setting.method().equals(PLACEHOLDER)) { + throw new IllegalArgumentException("You must either provide a value or a method reference " + + "for key JvmSettings" + setting.key()); + } + + String value; + // Retrieve value from static (!) test class method if no direct setting given + if (setting.value().equals(PLACEHOLDER)) { + Optional valueMethod = ReflectionSupport.findMethod(testClass, setting.method()); + if (valueMethod.isEmpty() || ! Modifier.isStatic(valueMethod.get().getModifiers())) { + throw new IllegalStateException("Could not find a static method '" + setting.method() + "' in test class"); } + value = (String) ReflectionSupport.invokeMethod(valueMethod.get(), null); + // Set to new value by using the directly given value + } else { + value = setting.value(); } - }); + + // If the retrieved value is null, delete the setting (will be reset after the test), otherwise set. 
+ if (value != null) { + broker.setJvmSetting(settingName, value); + } else if (oldSetting != null) { + broker.deleteJvmSetting(settingName); + } + } } - @Override - public void afterTestExecution(ExtensionContext extensionContext) throws Exception { - extensionContext.getTestMethod().ifPresent(method -> { - JvmSetting[] settings = method.getAnnotationsByType(JvmSetting.class); - for (JvmSetting setting : settings) { - // get the setting name (might need var args substitution) - String settingName = getSettingName(setting); - - // get a stored setting from context - String oldSetting = getStore(extensionContext).remove(settingName, String.class); - - // if present before, restore - if (oldSetting != null) { - System.setProperty(settingName, oldSetting); + private void resetSetting(List settings, JvmSettingBroker broker, ExtensionContext.Store store) throws Exception { + for (JvmSetting setting : settings) { + // get the setting name (might need var args substitution) + String settingName = getSettingName(setting); + + // get a stored setting from context + String oldSetting = store.remove(settingName, String.class); + + // if present before, restore + if (oldSetting != null) { + broker.setJvmSetting(settingName, oldSetting); // if NOT present before, delete - } else { - System.clearProperty(settingName); - } + } else { + broker.deleteJvmSetting(settingName); } - }); + } } private String getSettingName(JvmSetting setting) { @@ -95,4 +138,15 @@ private String getSettingName(JvmSetting setting) { return target.getScopedKey(); } + + private JvmSettingBroker getBroker(ExtensionContext extensionContext) throws Exception { + // Is this test class using local system properties, then get a broker for these + if (AnnotationSupport.isAnnotated(extensionContext.getTestClass(), LocalJvmSettings.class)) { + return LocalJvmSettings.localBroker; + // NOTE: this might be extended later with other annotations to support other means of handling the settings + } else { + throw new IllegalStateException("You must provide the @LocalJvmSettings annotation to the test class"); + } + } + } diff --git a/src/test/java/edu/harvard/iq/dataverse/util/testing/LocalJvmSettings.java b/src/test/java/edu/harvard/iq/dataverse/util/testing/LocalJvmSettings.java new file mode 100644 index 00000000000..372fa91f6f6 --- /dev/null +++ b/src/test/java/edu/harvard/iq/dataverse/util/testing/LocalJvmSettings.java @@ -0,0 +1,39 @@ +package edu.harvard.iq.dataverse.util.testing; + +import org.junit.jupiter.api.extension.ExtendWith; + +import java.lang.annotation.ElementType; +import java.lang.annotation.Inherited; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; + +/** + * This annotation expresses that a test class wants to manipulate local + * settings (because the tests run within the same JVM as the code itself). + * This is mostly true for unit tests. 
+ */ +@Retention(RetentionPolicy.RUNTIME) +@Target({ ElementType.TYPE }) +@ExtendWith(JvmSettingExtension.class) +@Inherited +public @interface LocalJvmSettings { + + JvmSettingBroker localBroker = new JvmSettingBroker() { + @Override + public String getJvmSetting(String key) { + return System.getProperty(key); + } + + @Override + public void setJvmSetting(String key, String value) { + System.setProperty(key, value); + } + + @Override + public String deleteJvmSetting(String key) { + return System.clearProperty(key); + } + }; + +} \ No newline at end of file From 645770f0c08c042f934707d88c187de34ebab95b Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Sun, 21 May 2023 22:59:20 +0200 Subject: [PATCH 0201/1525] refactor(test): make existing test using JvmSetting annotated with @LocalJvmSettings --- .../dataverse/api/auth/BearerTokenAuthMechanismTest.java | 4 +++- .../api/auth/SessionCookieAuthMechanismTest.java | 8 +++++--- .../iq/dataverse/export/SchemaDotOrgExporterTest.java | 2 ++ .../dataverse/externaltools/ExternalToolHandlerTest.java | 2 ++ .../harvard/iq/dataverse/search/IndexServiceBeanTest.java | 2 ++ .../iq/dataverse/search/SolrClientServiceTest.java | 2 ++ .../harvard/iq/dataverse/settings/FeatureFlagsTest.java | 2 ++ .../harvard/iq/dataverse/settings/JvmSettingsTest.java | 2 ++ .../edu/harvard/iq/dataverse/util/SystemConfigTest.java | 2 ++ .../edu/harvard/iq/dataverse/util/UrlTokenUtilTest.java | 2 ++ 10 files changed, 24 insertions(+), 4 deletions(-) diff --git a/src/test/java/edu/harvard/iq/dataverse/api/auth/BearerTokenAuthMechanismTest.java b/src/test/java/edu/harvard/iq/dataverse/api/auth/BearerTokenAuthMechanismTest.java index 0370daa5ea2..b38300df660 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/auth/BearerTokenAuthMechanismTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/auth/BearerTokenAuthMechanismTest.java @@ -12,6 +12,7 @@ import edu.harvard.iq.dataverse.authorization.users.User; import edu.harvard.iq.dataverse.settings.JvmSettings; import edu.harvard.iq.dataverse.util.testing.JvmSetting; +import edu.harvard.iq.dataverse.util.testing.LocalJvmSettings; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; import org.mockito.Mockito; @@ -25,7 +26,8 @@ import static edu.harvard.iq.dataverse.api.auth.BearerTokenAuthMechanism.*; import static org.junit.jupiter.api.Assertions.*; -public class BearerTokenAuthMechanismTest { +@LocalJvmSettings +class BearerTokenAuthMechanismTest { private static final String TEST_API_KEY = "test-api-key"; diff --git a/src/test/java/edu/harvard/iq/dataverse/api/auth/SessionCookieAuthMechanismTest.java b/src/test/java/edu/harvard/iq/dataverse/api/auth/SessionCookieAuthMechanismTest.java index 477f8ee377a..74a7d239c05 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/auth/SessionCookieAuthMechanismTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/auth/SessionCookieAuthMechanismTest.java @@ -6,6 +6,7 @@ import edu.harvard.iq.dataverse.authorization.users.User; import edu.harvard.iq.dataverse.settings.JvmSettings; import edu.harvard.iq.dataverse.util.testing.JvmSetting; +import edu.harvard.iq.dataverse.util.testing.LocalJvmSettings; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; import org.mockito.Mockito; @@ -13,7 +14,8 @@ import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertNull; -public class SessionCookieAuthMechanismTest { +@LocalJvmSettings +class SessionCookieAuthMechanismTest { private 
SessionCookieAuthMechanism sut; @@ -24,7 +26,7 @@ public void setUp() { @Test @JvmSetting(key = JvmSettings.FEATURE_FLAG, value = "false", varArgs = "api-session-auth") - public void testFindUserFromRequest_FeatureFlagDisabled() throws WrappedAuthErrorResponse { + void testFindUserFromRequest_FeatureFlagDisabled() throws WrappedAuthErrorResponse { sut.session = Mockito.mock(DataverseSession.class); User actual = sut.findUserFromRequest(new ContainerRequestTestFake()); @@ -34,7 +36,7 @@ public void testFindUserFromRequest_FeatureFlagDisabled() throws WrappedAuthErro @Test @JvmSetting(key = JvmSettings.FEATURE_FLAG, value = "true", varArgs = "api-session-auth") - public void testFindUserFromRequest_FeatureFlagEnabled_UserAuthenticated() throws WrappedAuthErrorResponse { + void testFindUserFromRequest_FeatureFlagEnabled_UserAuthenticated() throws WrappedAuthErrorResponse { DataverseSession dataverseSessionStub = Mockito.mock(DataverseSession.class); User testAuthenticatedUser = new AuthenticatedUser(); Mockito.when(dataverseSessionStub.getUser()).thenReturn(testAuthenticatedUser); diff --git a/src/test/java/edu/harvard/iq/dataverse/export/SchemaDotOrgExporterTest.java b/src/test/java/edu/harvard/iq/dataverse/export/SchemaDotOrgExporterTest.java index e660cf78da2..722b74406d4 100644 --- a/src/test/java/edu/harvard/iq/dataverse/export/SchemaDotOrgExporterTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/export/SchemaDotOrgExporterTest.java @@ -33,6 +33,7 @@ import javax.json.JsonObject; import edu.harvard.iq.dataverse.util.testing.JvmSetting; +import edu.harvard.iq.dataverse.util.testing.LocalJvmSettings; import org.junit.jupiter.api.BeforeAll; import org.junit.jupiter.api.AfterAll; import org.junit.jupiter.api.Test; @@ -44,6 +45,7 @@ /** * For docs see {@link SchemaDotOrgExporter}. */ +@LocalJvmSettings public class SchemaDotOrgExporterTest { private static final Logger logger = Logger.getLogger(SchemaDotOrgExporterTest.class.getCanonicalName()); diff --git a/src/test/java/edu/harvard/iq/dataverse/externaltools/ExternalToolHandlerTest.java b/src/test/java/edu/harvard/iq/dataverse/externaltools/ExternalToolHandlerTest.java index ab3a0263d66..c77d59123e4 100644 --- a/src/test/java/edu/harvard/iq/dataverse/externaltools/ExternalToolHandlerTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/externaltools/ExternalToolHandlerTest.java @@ -10,6 +10,7 @@ import edu.harvard.iq.dataverse.settings.JvmSettings; import edu.harvard.iq.dataverse.util.json.JsonUtil; import edu.harvard.iq.dataverse.util.testing.JvmSetting; +import edu.harvard.iq.dataverse.util.testing.LocalJvmSettings; import org.junit.jupiter.api.Test; import javax.json.Json; @@ -21,6 +22,7 @@ import java.util.ArrayList; import java.util.List; +@LocalJvmSettings public class ExternalToolHandlerTest { // TODO: It would probably be better to split these into individual tests. 
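The annotations added in this patch are mechanical, but the class-level support from the previous commit is easiest to see in a usage sketch (the test class and its methods below are invented; JvmSettings.VERSION is just a convenient key):

```java
import edu.harvard.iq.dataverse.settings.JvmSettings;
import edu.harvard.iq.dataverse.util.testing.JvmSetting;
import edu.harvard.iq.dataverse.util.testing.LocalJvmSettings;
import org.junit.jupiter.api.Test;

import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertNull;

// Sketch: class-level settings are applied once before all tests run;
// method-level settings override them per test and are restored afterward.
@LocalJvmSettings
@JvmSetting(key = JvmSettings.VERSION, value = "class-wide")
class ExampleJvmSettingTest {

    @Test
    void usesClassWideValue() {
        assertEquals("class-wide", System.getProperty(JvmSettings.VERSION.getScopedKey()));
    }

    @Test
    @JvmSetting(key = JvmSettings.VERSION, method = "noVersion")
    void methodReferenceCanDeleteTheSetting() {
        // The static method reference returned null, so the extension deleted
        // the class-wide setting for this test only.
        assertNull(System.getProperty(JvmSettings.VERSION.getScopedKey()));
    }

    static String noVersion() {
        return null;
    }
}
```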
diff --git a/src/test/java/edu/harvard/iq/dataverse/search/IndexServiceBeanTest.java b/src/test/java/edu/harvard/iq/dataverse/search/IndexServiceBeanTest.java index dd3dc2c6c95..ce6005a3d11 100644 --- a/src/test/java/edu/harvard/iq/dataverse/search/IndexServiceBeanTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/search/IndexServiceBeanTest.java @@ -18,6 +18,7 @@ import edu.harvard.iq.dataverse.settings.SettingsServiceBean; import edu.harvard.iq.dataverse.util.SystemConfig; import edu.harvard.iq.dataverse.util.testing.JvmSetting; +import edu.harvard.iq.dataverse.util.testing.LocalJvmSettings; import org.apache.solr.client.solrj.SolrServerException; import org.apache.solr.client.solrj.impl.HttpSolrClient; import org.junit.jupiter.api.BeforeEach; @@ -37,6 +38,7 @@ import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertTrue; +@LocalJvmSettings @ExtendWith(MockitoExtension.class) public class IndexServiceBeanTest { private static final Logger logger = Logger.getLogger(IndexServiceBeanTest.class.getCanonicalName()); diff --git a/src/test/java/edu/harvard/iq/dataverse/search/SolrClientServiceTest.java b/src/test/java/edu/harvard/iq/dataverse/search/SolrClientServiceTest.java index a3b3c8a2080..72eafcd763c 100644 --- a/src/test/java/edu/harvard/iq/dataverse/search/SolrClientServiceTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/search/SolrClientServiceTest.java @@ -4,6 +4,7 @@ import edu.harvard.iq.dataverse.settings.SettingsServiceBean; import edu.harvard.iq.dataverse.util.SystemConfig; import edu.harvard.iq.dataverse.util.testing.JvmSetting; +import edu.harvard.iq.dataverse.util.testing.LocalJvmSettings; import org.apache.solr.client.solrj.impl.HttpSolrClient; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; @@ -14,6 +15,7 @@ import static org.junit.jupiter.api.Assertions.assertEquals; +@LocalJvmSettings @ExtendWith(MockitoExtension.class) class SolrClientServiceTest { diff --git a/src/test/java/edu/harvard/iq/dataverse/settings/FeatureFlagsTest.java b/src/test/java/edu/harvard/iq/dataverse/settings/FeatureFlagsTest.java index 1a9fdeaa3da..26f2186695d 100644 --- a/src/test/java/edu/harvard/iq/dataverse/settings/FeatureFlagsTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/settings/FeatureFlagsTest.java @@ -1,10 +1,12 @@ package edu.harvard.iq.dataverse.settings; import edu.harvard.iq.dataverse.util.testing.JvmSetting; +import edu.harvard.iq.dataverse.util.testing.LocalJvmSettings; import org.junit.jupiter.api.Test; import static org.junit.jupiter.api.Assertions.*; +@LocalJvmSettings class FeatureFlagsTest { @Test diff --git a/src/test/java/edu/harvard/iq/dataverse/settings/JvmSettingsTest.java b/src/test/java/edu/harvard/iq/dataverse/settings/JvmSettingsTest.java index 559d00fe0b7..6b03f20fc41 100644 --- a/src/test/java/edu/harvard/iq/dataverse/settings/JvmSettingsTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/settings/JvmSettingsTest.java @@ -1,11 +1,13 @@ package edu.harvard.iq.dataverse.settings; import edu.harvard.iq.dataverse.util.testing.JvmSetting; +import edu.harvard.iq.dataverse.util.testing.LocalJvmSettings; import edu.harvard.iq.dataverse.util.testing.SystemProperty; import org.junit.jupiter.api.Test; import static org.junit.jupiter.api.Assertions.assertEquals; +@LocalJvmSettings class JvmSettingsTest { @Test @JvmSetting(key = JvmSettings.VERSION, value = "foobar") diff --git a/src/test/java/edu/harvard/iq/dataverse/util/SystemConfigTest.java 
b/src/test/java/edu/harvard/iq/dataverse/util/SystemConfigTest.java index 2806aa3aa9b..82b89bca678 100644 --- a/src/test/java/edu/harvard/iq/dataverse/util/SystemConfigTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/util/SystemConfigTest.java @@ -3,6 +3,7 @@ import edu.harvard.iq.dataverse.settings.JvmSettings; import edu.harvard.iq.dataverse.settings.SettingsServiceBean; import edu.harvard.iq.dataverse.util.testing.JvmSetting; +import edu.harvard.iq.dataverse.util.testing.LocalJvmSettings; import org.junit.jupiter.api.Test; import org.junit.jupiter.api.extension.ExtendWith; import org.junit.jupiter.params.ParameterizedTest; @@ -15,6 +16,7 @@ import static org.junit.jupiter.api.Assertions.assertTrue; import static org.mockito.Mockito.doReturn; +@LocalJvmSettings @ExtendWith(MockitoExtension.class) class SystemConfigTest { diff --git a/src/test/java/edu/harvard/iq/dataverse/util/UrlTokenUtilTest.java b/src/test/java/edu/harvard/iq/dataverse/util/UrlTokenUtilTest.java index 8310af8885c..d70a108e7c6 100644 --- a/src/test/java/edu/harvard/iq/dataverse/util/UrlTokenUtilTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/util/UrlTokenUtilTest.java @@ -9,6 +9,7 @@ import edu.harvard.iq.dataverse.authorization.users.ApiToken; import edu.harvard.iq.dataverse.settings.JvmSettings; import edu.harvard.iq.dataverse.util.testing.JvmSetting; +import edu.harvard.iq.dataverse.util.testing.LocalJvmSettings; import org.junit.jupiter.api.Test; import java.util.ArrayList; @@ -16,6 +17,7 @@ import static org.junit.jupiter.api.Assertions.assertEquals; +@LocalJvmSettings class UrlTokenUtilTest { @Test From dd80162cb7f1b6ad5268057cdb7da547c9bf4b62 Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Sun, 21 May 2023 23:01:15 +0200 Subject: [PATCH 0202/1525] style(test,api,auth): make BearerTokenAuthMechanismTest simpler --- .../auth/BearerTokenAuthMechanismTest.java | 55 +++++-------------- 1 file changed, 15 insertions(+), 40 deletions(-) diff --git a/src/test/java/edu/harvard/iq/dataverse/api/auth/BearerTokenAuthMechanismTest.java b/src/test/java/edu/harvard/iq/dataverse/api/auth/BearerTokenAuthMechanismTest.java index b38300df660..8a57ee4c41c 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/auth/BearerTokenAuthMechanismTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/auth/BearerTokenAuthMechanismTest.java @@ -27,6 +27,7 @@ import static org.junit.jupiter.api.Assertions.*; @LocalJvmSettings +@JvmSetting(key = JvmSettings.FEATURE_FLAG, value = "true", varArgs = "api-bearer-auth") class BearerTokenAuthMechanismTest { private static final String TEST_API_KEY = "test-api-key"; @@ -36,14 +37,12 @@ class BearerTokenAuthMechanismTest { @BeforeEach public void setUp() { sut = new BearerTokenAuthMechanism(); - } - - @Test - @JvmSetting(key = JvmSettings.FEATURE_FLAG, value = "true", varArgs = "api-bearer-auth") - public void testFindUserFromRequest_no_token() throws WrappedAuthErrorResponse { sut.authSvc = Mockito.mock(AuthenticationServiceBean.class); sut.userSvc = Mockito.mock(UserServiceBean.class); + } + @Test + void testFindUserFromRequest_no_token() throws WrappedAuthErrorResponse { ContainerRequestContext testContainerRequest = new BearerTokenKeyContainerRequestTestFake(null); User actual = sut.findUserFromRequest(testContainerRequest); @@ -51,11 +50,9 @@ public void testFindUserFromRequest_no_token() throws WrappedAuthErrorResponse { } @Test - @JvmSetting(key = JvmSettings.FEATURE_FLAG, value = "true", varArgs = "api-bearer-auth") - public void testFindUserFromRequest_invalid_token() 
throws WrappedAuthErrorResponse { - sut.authSvc = Mockito.mock(AuthenticationServiceBean.class); - sut.userSvc = Mockito.mock(UserServiceBean.class); + void testFindUserFromRequest_invalid_token() throws WrappedAuthErrorResponse { Mockito.when(sut.authSvc.getAuthenticationProviderIdsOfType(OIDCAuthProvider.class)).thenReturn(Collections.emptySet()); + ContainerRequestContext testContainerRequest = new BearerTokenKeyContainerRequestTestFake("Bearer "); WrappedAuthErrorResponse wrappedAuthErrorResponse = assertThrows(WrappedAuthErrorResponse.class, () -> sut.findUserFromRequest(testContainerRequest)); @@ -63,11 +60,9 @@ public void testFindUserFromRequest_invalid_token() throws WrappedAuthErrorRespo assertEquals(INVALID_BEARER_TOKEN, wrappedAuthErrorResponse.getMessage()); } @Test - @JvmSetting(key = JvmSettings.FEATURE_FLAG, value = "true", varArgs = "api-bearer-auth") - public void testFindUserFromRequest_no_OidcProvider() throws WrappedAuthErrorResponse { - sut.authSvc = Mockito.mock(AuthenticationServiceBean.class); - sut.userSvc = Mockito.mock(UserServiceBean.class); + void testFindUserFromRequest_no_OidcProvider() throws WrappedAuthErrorResponse { Mockito.when(sut.authSvc.getAuthenticationProviderIdsOfType(OIDCAuthProvider.class)).thenReturn(Collections.emptySet()); + ContainerRequestContext testContainerRequest = new BearerTokenKeyContainerRequestTestFake("Bearer " +TEST_API_KEY); WrappedAuthErrorResponse wrappedAuthErrorResponse = assertThrows(WrappedAuthErrorResponse.class, () -> sut.findUserFromRequest(testContainerRequest)); @@ -76,12 +71,7 @@ public void testFindUserFromRequest_no_OidcProvider() throws WrappedAuthErrorRes } @Test - @JvmSetting(key = JvmSettings.FEATURE_FLAG, value = "true", varArgs = "api-bearer-auth") - public void testFindUserFromRequest_oneProvider_invalidToken_1() throws WrappedAuthErrorResponse, ParseException, IOException, OAuth2Exception { - - sut.userSvc = Mockito.mock(UserServiceBean.class); - sut.authSvc = Mockito.mock(AuthenticationServiceBean.class); - + void testFindUserFromRequest_oneProvider_invalidToken_1() throws WrappedAuthErrorResponse, ParseException, IOException, OAuth2Exception { OIDCAuthProvider oidcAuthProvider = Mockito.mock(OIDCAuthProvider.class); String providerID = "OIEDC"; Mockito.when(oidcAuthProvider.getId()).thenReturn(providerID); @@ -91,7 +81,7 @@ public void testFindUserFromRequest_oneProvider_invalidToken_1() throws WrappedA // ensure that the OIDCAuthProvider returns a valid UserRecordIdentifier for a given Token BearerAccessToken token = BearerAccessToken.parse("Bearer " + TEST_API_KEY); - Mockito.when(oidcAuthProvider.getUserIdentifierForValidToken(token)).thenReturn(Optional.empty()); + Mockito.when(oidcAuthProvider.getUserIdentifier(token)).thenReturn(Optional.empty()); // when ContainerRequestContext testContainerRequest = new BearerTokenKeyContainerRequestTestFake("Bearer " + TEST_API_KEY); @@ -102,12 +92,7 @@ public void testFindUserFromRequest_oneProvider_invalidToken_1() throws WrappedA } @Test - @JvmSetting(key = JvmSettings.FEATURE_FLAG, value = "true", varArgs = "api-bearer-auth") - public void testFindUserFromRequest_oneProvider_invalidToken_2() throws WrappedAuthErrorResponse, ParseException, IOException, OAuth2Exception { - - sut.userSvc = Mockito.mock(UserServiceBean.class); - sut.authSvc = Mockito.mock(AuthenticationServiceBean.class); - + void testFindUserFromRequest_oneProvider_invalidToken_2() throws WrappedAuthErrorResponse, ParseException, IOException, OAuth2Exception { OIDCAuthProvider oidcAuthProvider 
= Mockito.mock(OIDCAuthProvider.class); String providerID = "OIEDC"; Mockito.when(oidcAuthProvider.getId()).thenReturn(providerID); @@ -127,12 +112,7 @@ public void testFindUserFromRequest_oneProvider_invalidToken_2() throws WrappedA assertEquals(UNAUTHORIZED_BEARER_TOKEN, wrappedAuthErrorResponse.getMessage()); } @Test - @JvmSetting(key = JvmSettings.FEATURE_FLAG, value = "true", varArgs = "api-bearer-auth") - public void testFindUserFromRequest_oneProvider_validToken() throws WrappedAuthErrorResponse, ParseException, IOException, OAuth2Exception { - - sut.userSvc = Mockito.mock(UserServiceBean.class); - sut.authSvc = Mockito.mock(AuthenticationServiceBean.class); - + void testFindUserFromRequest_oneProvider_validToken() throws WrappedAuthErrorResponse, ParseException, IOException, OAuth2Exception { OIDCAuthProvider oidcAuthProvider = Mockito.mock(OIDCAuthProvider.class); String providerID = "OIEDC"; Mockito.when(oidcAuthProvider.getId()).thenReturn(providerID); @@ -143,7 +123,7 @@ public void testFindUserFromRequest_oneProvider_validToken() throws WrappedAuthE // ensure that the OIDCAuthProvider returns a valid UserRecordIdentifier for a given Token UserRecordIdentifier userinfo = new UserRecordIdentifier(providerID, "KEY"); BearerAccessToken token = BearerAccessToken.parse("Bearer " + TEST_API_KEY); - Mockito.when(oidcAuthProvider.getUserIdentifierForValidToken(token)).thenReturn(Optional.of(userinfo)); + Mockito.when(oidcAuthProvider.getUserIdentifier(token)).thenReturn(Optional.of(userinfo)); // ensures that the AuthenticationServiceBean can retrieve an Authenticated user based on the UserRecordIdentifier AuthenticatedUser testAuthenticatedUser = new AuthenticatedUser(); @@ -160,12 +140,7 @@ public void testFindUserFromRequest_oneProvider_validToken() throws WrappedAuthE } @Test - @JvmSetting(key = JvmSettings.FEATURE_FLAG, value = "true", varArgs = "api-bearer-auth") - public void testFindUserFromRequest_oneProvider_validToken_noAccount() throws WrappedAuthErrorResponse, ParseException, IOException, OAuth2Exception { - - sut.userSvc = Mockito.mock(UserServiceBean.class); - sut.authSvc = Mockito.mock(AuthenticationServiceBean.class); - + void testFindUserFromRequest_oneProvider_validToken_noAccount() throws WrappedAuthErrorResponse, ParseException, IOException, OAuth2Exception { OIDCAuthProvider oidcAuthProvider = Mockito.mock(OIDCAuthProvider.class); String providerID = "OIEDC"; Mockito.when(oidcAuthProvider.getId()).thenReturn(providerID); @@ -176,7 +151,7 @@ public void testFindUserFromRequest_oneProvider_validToken_noAccount() throws Wr // ensure that the OIDCAuthProvider returns a valid UserRecordIdentifier for a given Token UserRecordIdentifier userinfo = new UserRecordIdentifier(providerID, "KEY"); BearerAccessToken token = BearerAccessToken.parse("Bearer " + TEST_API_KEY); - Mockito.when(oidcAuthProvider.getUserIdentifierForValidToken(token)).thenReturn(Optional.of(userinfo)); + Mockito.when(oidcAuthProvider.getUserIdentifier(token)).thenReturn(Optional.of(userinfo)); // ensures that the AuthenticationServiceBean can retrieve an Authenticated user based on the UserRecordIdentifier Mockito.when(sut.authSvc.lookupUser(userinfo)).thenReturn(null); From 544a502e3f57b52a2dbaae562e43083e7a015315 Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Sun, 21 May 2023 23:03:33 +0200 Subject: [PATCH 0203/1525] refactor(auth,api,test): adapt exception checks in BearerTokenAuthMechanismTest - Now that we no longer need to catch OAuth2Exception, adapt accordingly - This fixed the failing 
tests --- .../api/auth/BearerTokenAuthMechanismTest.java | 15 +++++++-------- 1 file changed, 7 insertions(+), 8 deletions(-) diff --git a/src/test/java/edu/harvard/iq/dataverse/api/auth/BearerTokenAuthMechanismTest.java b/src/test/java/edu/harvard/iq/dataverse/api/auth/BearerTokenAuthMechanismTest.java index 8a57ee4c41c..281f1d21d45 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/auth/BearerTokenAuthMechanismTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/auth/BearerTokenAuthMechanismTest.java @@ -6,7 +6,6 @@ import edu.harvard.iq.dataverse.api.auth.doubles.BearerTokenKeyContainerRequestTestFake; import edu.harvard.iq.dataverse.authorization.AuthenticationServiceBean; import edu.harvard.iq.dataverse.authorization.UserRecordIdentifier; -import edu.harvard.iq.dataverse.authorization.providers.oauth2.OAuth2Exception; import edu.harvard.iq.dataverse.authorization.providers.oauth2.oidc.OIDCAuthProvider; import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser; import edu.harvard.iq.dataverse.authorization.users.User; @@ -50,7 +49,7 @@ void testFindUserFromRequest_no_token() throws WrappedAuthErrorResponse { } @Test - void testFindUserFromRequest_invalid_token() throws WrappedAuthErrorResponse { + void testFindUserFromRequest_invalid_token() { Mockito.when(sut.authSvc.getAuthenticationProviderIdsOfType(OIDCAuthProvider.class)).thenReturn(Collections.emptySet()); ContainerRequestContext testContainerRequest = new BearerTokenKeyContainerRequestTestFake("Bearer "); @@ -60,7 +59,7 @@ void testFindUserFromRequest_invalid_token() throws WrappedAuthErrorResponse { assertEquals(INVALID_BEARER_TOKEN, wrappedAuthErrorResponse.getMessage()); } @Test - void testFindUserFromRequest_no_OidcProvider() throws WrappedAuthErrorResponse { + void testFindUserFromRequest_no_OidcProvider() { Mockito.when(sut.authSvc.getAuthenticationProviderIdsOfType(OIDCAuthProvider.class)).thenReturn(Collections.emptySet()); ContainerRequestContext testContainerRequest = new BearerTokenKeyContainerRequestTestFake("Bearer " +TEST_API_KEY); @@ -71,7 +70,7 @@ void testFindUserFromRequest_no_OidcProvider() throws WrappedAuthErrorResponse { } @Test - void testFindUserFromRequest_oneProvider_invalidToken_1() throws WrappedAuthErrorResponse, ParseException, IOException, OAuth2Exception { + void testFindUserFromRequest_oneProvider_invalidToken_1() throws ParseException, IOException { OIDCAuthProvider oidcAuthProvider = Mockito.mock(OIDCAuthProvider.class); String providerID = "OIEDC"; Mockito.when(oidcAuthProvider.getId()).thenReturn(providerID); @@ -92,7 +91,7 @@ void testFindUserFromRequest_oneProvider_invalidToken_1() throws WrappedAuthErro } @Test - void testFindUserFromRequest_oneProvider_invalidToken_2() throws WrappedAuthErrorResponse, ParseException, IOException, OAuth2Exception { + void testFindUserFromRequest_oneProvider_invalidToken_2() throws ParseException, IOException { OIDCAuthProvider oidcAuthProvider = Mockito.mock(OIDCAuthProvider.class); String providerID = "OIEDC"; Mockito.when(oidcAuthProvider.getId()).thenReturn(providerID); @@ -102,7 +101,7 @@ void testFindUserFromRequest_oneProvider_invalidToken_2() throws WrappedAuthErro // ensure that the OIDCAuthProvider returns a valid UserRecordIdentifier for a given Token BearerAccessToken token = BearerAccessToken.parse("Bearer " + TEST_API_KEY); - Mockito.when(oidcAuthProvider.getUserIdentifierForValidToken(token)).thenThrow(OAuth2Exception.class); + Mockito.when(oidcAuthProvider.getUserIdentifier(token)).thenThrow(IOException.class); // when 
ContainerRequestContext testContainerRequest = new BearerTokenKeyContainerRequestTestFake("Bearer " + TEST_API_KEY); @@ -112,7 +111,7 @@ void testFindUserFromRequest_oneProvider_invalidToken_2() throws WrappedAuthErro assertEquals(UNAUTHORIZED_BEARER_TOKEN, wrappedAuthErrorResponse.getMessage()); } @Test - void testFindUserFromRequest_oneProvider_validToken() throws WrappedAuthErrorResponse, ParseException, IOException, OAuth2Exception { + void testFindUserFromRequest_oneProvider_validToken() throws WrappedAuthErrorResponse, ParseException, IOException { OIDCAuthProvider oidcAuthProvider = Mockito.mock(OIDCAuthProvider.class); String providerID = "OIEDC"; Mockito.when(oidcAuthProvider.getId()).thenReturn(providerID); @@ -140,7 +139,7 @@ void testFindUserFromRequest_oneProvider_validToken() throws WrappedAuthErrorRes } @Test - void testFindUserFromRequest_oneProvider_validToken_noAccount() throws WrappedAuthErrorResponse, ParseException, IOException, OAuth2Exception { + void testFindUserFromRequest_oneProvider_validToken_noAccount() throws WrappedAuthErrorResponse, ParseException, IOException { OIDCAuthProvider oidcAuthProvider = Mockito.mock(OIDCAuthProvider.class); String providerID = "OIEDC"; Mockito.when(oidcAuthProvider.getId()).thenReturn(providerID); From c859ef64adc6cb59065da4f0cf6aa0097a022701 Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Sun, 21 May 2023 23:16:19 +0200 Subject: [PATCH 0204/1525] doc(test): add changes to JvmSetting helper in testing guide --- doc/sphinx-guides/source/developers/testing.rst | 14 ++++++++++---- 1 file changed, 10 insertions(+), 4 deletions(-) diff --git a/doc/sphinx-guides/source/developers/testing.rst b/doc/sphinx-guides/source/developers/testing.rst index c734bed8b70..5814d9d4e7b 100755 --- a/doc/sphinx-guides/source/developers/testing.rst +++ b/doc/sphinx-guides/source/developers/testing.rst @@ -85,20 +85,26 @@ JUnit 5 Test Helper Extensions Our codebase provides little helpers to ease dealing with state during tests. Some tests might need to change something which should be restored after the test ran. -For unit tests, the most interesting part is to set a JVM setting just for the current test. -Please use the ``@JvmSetting(key = JvmSettings.XXX, value = "")`` annotation on a test method or -a test class to set and clear the property automatically. +For unit tests, the most interesting part is to set a JVM setting just for the current test or a whole test class. +(Which might be an inner class, too!). Please make use of the ``@JvmSetting(key = JvmSettings.XXX, value = "")`` +annotation and also make sure to annotate the test class with ``@LocalJvmSettings``. Inspired by JUnit's ``@MethodSource`` annotation, you may use ``@JvmSetting(key = JvmSettings.XXX, method = "zzz")`` -to reference a method located in the same test class by name (i. e. ``private static String zzz() {}``) to allow +to reference a static method located in the same test class by name (i. e. ``private static String zzz() {}``) to allow retrieving dynamic data instead of String constants only. (Note the requirement for a *static* method!) +If you want to delete a setting, simply provide a ``null`` value. This can be used to override a class-wide setting +or some other default that is present for some reason. + To set arbitrary system properties for the current test, a similar extension ``@SystemProperty(key = "", value = "")`` has been added. (Note: it does not support method references.) 
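To make the guide text above concrete, here is a minimal sketch of a test class using both annotation styles. The class and method names are illustrative only; the annotation usage mirrors JvmSettingsTest and OIDCAuthenticationProviderFactoryIT elsewhere in this patch series, and, as the guide notes, the per-method @JvmSetting annotations take effect only on a class annotated with @LocalJvmSettings:

    @LocalJvmSettings
    class ExampleSettingsTest {

        // referenced by name below; must be static, as the guide requires
        private static String dynamicValue() {
            return "computed-at-test-time";
        }

        @Test
        @JvmSetting(key = JvmSettings.VERSION, value = "foobar")
        void constantValue() {
            // the setting resolves to "foobar" inside this test only and is
            // cleared/restored automatically afterwards
        }

        @Test
        @JvmSetting(key = JvmSettings.VERSION, method = "dynamicValue")
        void methodReference() {
            // the setting resolves to whatever dynamicValue() returned
        }
    }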
Both extensions will ensure the global state of system properties is non-interfering for test executions. Tests using these extensions will be executed in serial. +This settings helper may be extended at a later time to manipulate settings in a remote instance during integration +or end-to-end testing. Stay tuned! + Observing Changes to Code Coverage ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ From bda75c18d5b1799e81f17f6711b9323441f559ff Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Mon, 22 May 2023 01:46:52 +0200 Subject: [PATCH 0205/1525] feat(model): make UserRecordIdentifier comparable Adding equals and hashCode methods to enable testing for equality in tests, etc. --- .../authorization/UserRecordIdentifier.java | 14 ++++++++++++++ 1 file changed, 14 insertions(+) diff --git a/src/main/java/edu/harvard/iq/dataverse/authorization/UserRecordIdentifier.java b/src/main/java/edu/harvard/iq/dataverse/authorization/UserRecordIdentifier.java index 963ee592bbf..dfbb43fae46 100644 --- a/src/main/java/edu/harvard/iq/dataverse/authorization/UserRecordIdentifier.java +++ b/src/main/java/edu/harvard/iq/dataverse/authorization/UserRecordIdentifier.java @@ -2,6 +2,8 @@ import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser; +import java.util.Objects; + /** * Identifies a user using two strings: *
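The practical effect of the equals()/hashCode() pair added in the next hunk is easiest to see with Mockito, whose argument matching relies on equals(): a stub recorded with one identifier instance now also fires for a different but equal instance, which is exactly what the integration test in the following patch depends on. A small sketch, with illustrative provider and user IDs and the mock names taken from those tests:

    UserRecordIdentifier a = new UserRecordIdentifier("oidc-provider", "user-123");
    UserRecordIdentifier b = new UserRecordIdentifier("oidc-provider", "user-123");
    assertEquals(a, b); // holds only with the new equals()/hashCode()

    // a stub recorded with instance "a"...
    Mockito.when(authService.lookupUser(a)).thenReturn(user);
    // ...now also matches when the code under test passes the equal instance "b"
    assertEquals(user, authService.lookupUser(b));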
      @@ -38,4 +40,16 @@ public AuthenticatedUserLookup createAuthenticatedUserLookup( AuthenticatedUser return new AuthenticatedUserLookup(userIdInRepo, repoId, u); } + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (!(o instanceof UserRecordIdentifier)) return false; + UserRecordIdentifier that = (UserRecordIdentifier) o; + return Objects.equals(repoId, that.repoId) && Objects.equals(getUserIdInRepo(), that.getUserIdInRepo()); + } + + @Override + public int hashCode() { + return Objects.hash(repoId, getUserIdInRepo()); + } } From 7004191eeec6f2deb5a01d52222fa1da9cea725b Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Mon, 22 May 2023 01:52:38 +0200 Subject: [PATCH 0206/1525] feat(test,api,auth): create actual integration tests in OIDCAuthenticationProviderFactoryIT - First test makes a roundtrip to receive the user info for the kcuser - Second test simulates an API request with a bearer token --- .../OIDCAuthenticationProviderFactoryIT.java | 123 +++++++++++++++++- 1 file changed, 116 insertions(+), 7 deletions(-) diff --git a/src/test/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/oidc/OIDCAuthenticationProviderFactoryIT.java b/src/test/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/oidc/OIDCAuthenticationProviderFactoryIT.java index 53cfcca2742..a5aa29cc083 100644 --- a/src/test/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/oidc/OIDCAuthenticationProviderFactoryIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/oidc/OIDCAuthenticationProviderFactoryIT.java @@ -1,37 +1,146 @@ package edu.harvard.iq.dataverse.authorization.providers.oauth2.oidc; +import com.nimbusds.oauth2.sdk.token.BearerAccessToken; +import com.nimbusds.openid.connect.sdk.claims.UserInfo; import dasniko.testcontainers.keycloak.KeycloakContainer; +import edu.harvard.iq.dataverse.UserServiceBean; +import edu.harvard.iq.dataverse.api.auth.BearerTokenAuthMechanism; +import edu.harvard.iq.dataverse.api.auth.doubles.BearerTokenKeyContainerRequestTestFake; +import edu.harvard.iq.dataverse.authorization.AuthenticationServiceBean; +import edu.harvard.iq.dataverse.authorization.UserRecordIdentifier; +import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser; +import edu.harvard.iq.dataverse.authorization.users.User; +import edu.harvard.iq.dataverse.mocks.MockAuthenticatedUser; import edu.harvard.iq.dataverse.settings.JvmSettings; import edu.harvard.iq.dataverse.util.testing.JvmSetting; +import edu.harvard.iq.dataverse.util.testing.LocalJvmSettings; import org.junit.jupiter.api.Tag; import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.keycloak.admin.client.Keycloak; +import org.keycloak.admin.client.KeycloakBuilder; +import org.mockito.InjectMocks; +import org.mockito.Mock; +import org.mockito.junit.jupiter.MockitoExtension; import org.testcontainers.junit.jupiter.Container; import org.testcontainers.junit.jupiter.Testcontainers; +import java.util.Optional; +import java.util.Set; + +import static edu.harvard.iq.dataverse.authorization.providers.oauth2.oidc.OIDCAuthenticationProviderFactoryIT.clientId; +import static edu.harvard.iq.dataverse.authorization.providers.oauth2.oidc.OIDCAuthenticationProviderFactoryIT.clientSecret; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotNull; import static org.junit.jupiter.api.Assertions.assertTrue; +import static 
org.junit.jupiter.api.Assumptions.assumeFalse;
+import static org.junit.jupiter.api.Assumptions.assumeTrue;
+import static org.mockito.Mockito.when;
 
 @Tag("testcontainers")
 @Testcontainers
+@ExtendWith(MockitoExtension.class)
+// NOTE: order is important here - Testcontainers must be first, otherwise it's not ready when we call getAuthUrl()
+@LocalJvmSettings
+@JvmSetting(key = JvmSettings.OIDC_CLIENT_ID, value = clientId)
+@JvmSetting(key = JvmSettings.OIDC_CLIENT_SECRET, value = clientSecret)
+@JvmSetting(key = JvmSettings.OIDC_AUTH_SERVER_URL, method = "getAuthUrl")
 class OIDCAuthenticationProviderFactoryIT {
+    // NOTE: the following values are taken from the realm import file!
     static final String clientId = "oidc-client";
     static final String clientSecret = "ss6gE8mODCDfqesQaSG3gwUwZqZt547E";
     static final String realm = "oidc-realm";
+    static final String adminUser = "kcuser";
+    static final String adminPassword = "kcpassword";
+    static final String clientIdAdminCli = "admin-cli";
 
+    // The realm JSON resides in conf/keycloak/oidc-realm.json and gets avail here using in pom.xml
     @Container
-    static KeycloakContainer keycloakContainer = new KeycloakContainer().withRealmImportFile("keycloak/oidc-realm.json");
+    static KeycloakContainer keycloakContainer = new KeycloakContainer("quay.io/keycloak/keycloak:19.0")
+        .withRealmImportFile("keycloak/oidc-realm.json")
+        .withAdminUsername(adminUser)
+        .withAdminPassword(adminPassword);
 
-    // simple method to retrieve the issuer URL, referenced to by @JvmSetting annotations
+    // simple method to retrieve the issuer URL, referenced to by @JvmSetting annotations (do not delete)
     private static String getAuthUrl() {
         return keycloakContainer.getAuthServerUrl() + "realms/" + realm;
     }
 
+    OIDCAuthProvider getProvider() throws Exception {
+        OIDCAuthProvider oidcAuthProvider = (OIDCAuthProvider) OIDCAuthenticationProviderFactory.buildFromSettings();
+
+        assumeTrue(oidcAuthProvider.getMetadata().getTokenEndpointURI().toString()
+            .startsWith(keycloakContainer.getAuthServerUrl()));
+
+        return oidcAuthProvider;
+    }
+
+    Keycloak getAdminClient() {
+        return KeycloakBuilder.builder()
+            .serverUrl(keycloakContainer.getAuthServerUrl())
+            .realm(realm)
+            .clientId(clientIdAdminCli)
+            .username(keycloakContainer.getAdminUsername())
+            .password(keycloakContainer.getAdminPassword())
+            .build();
+    }
+
+    String getBearerToken() throws Exception {
+        Keycloak keycloak = getAdminClient();
+        return keycloak.tokenManager().getAccessTokenString();
+    }
+
     @Test
-    @JvmSetting(key = JvmSettings.OIDC_CLIENT_ID, value = clientId)
-    @JvmSetting(key = JvmSettings.OIDC_CLIENT_SECRET, value = clientSecret)
-    @JvmSetting(key = JvmSettings.OIDC_AUTH_SERVER_URL, method = "getAuthUrl")
     void testCreateProvider() throws Exception {
-        OIDCAuthProvider oidcAuthProvider = (OIDCAuthProvider) OIDCAuthenticationProviderFactory.buildFromSettings();
-        assertTrue(oidcAuthProvider.getMetadata().getTokenEndpointURI().toString().startsWith(keycloakContainer.getAuthServerUrl()));
+        OIDCAuthProvider oidcAuthProvider = getProvider();
+        String token = getBearerToken();
+        assumeFalse(token == null);
+
+        Optional<UserInfo> info = oidcAuthProvider.getUserInfo(new BearerAccessToken(token));
+
+        assertTrue(info.isPresent());
+        assertEquals(adminUser, info.get().getPreferredUsername());
+    }
+
+    @Mock
+    UserServiceBean userService;
+    @Mock
+    AuthenticationServiceBean authService;
+
+    @InjectMocks
+    BearerTokenAuthMechanism bearerTokenAuthMechanism;
+
+    @Test
+    @JvmSetting(key = JvmSettings.FEATURE_FLAG, varArgs = "api-bearer-auth", value = "true")
+    void testApiBearerAuth() throws Exception {
+        assumeFalse(userService == null);
+        assumeFalse(authService == null);
+        assumeFalse(bearerTokenAuthMechanism == null);
+
+        // given
+        // Get the access token from the remote Keycloak in the container
+        String accessToken = getBearerToken();
+        assumeFalse(accessToken == null);
+
+        OIDCAuthProvider oidcAuthProvider = getProvider();
+        // This will also receive the details from the remote Keycloak in the container
+        UserRecordIdentifier identifier = oidcAuthProvider.getUserIdentifier(new BearerAccessToken(accessToken)).get();
+        String token = "Bearer " + accessToken;
+        BearerTokenKeyContainerRequestTestFake request = new BearerTokenKeyContainerRequestTestFake(token);
+        AuthenticatedUser user = new MockAuthenticatedUser();
+
+        // setup mocks (we don't want or need a database here)
+        when(authService.getAuthenticationProviderIdsOfType(OIDCAuthProvider.class)).thenReturn(Set.of(oidcAuthProvider.getId()));
+        when(authService.getAuthenticationProvider(oidcAuthProvider.getId())).thenReturn(oidcAuthProvider);
+        when(authService.lookupUser(identifier)).thenReturn(user);
+        when(userService.updateLastApiUseTime(user)).thenReturn(user);
+
+        // when (let's do this again, but now with the actual subject under test!)
+        User lookedUpUser = bearerTokenAuthMechanism.findUserFromRequest(request);
+
+        // then
+        assertNotNull(lookedUpUser);
+        assertEquals(user, lookedUpUser);
+    }
 }
\ No newline at end of file

From 564d6a73bcc0e101299c7f370bdf5fc6d42f8287 Mon Sep 17 00:00:00 2001
From: Oliver Bertuch
Date: Mon, 22 May 2023 01:53:34 +0200
Subject: [PATCH 0207/1525] feat(build): make integration test using Testcontainers executable via Maven

Simply call mvn verify to execute, as it is meant to be!
---
 pom.xml | 16 ++++++++++++++++
 1 file changed, 16 insertions(+)

diff --git a/pom.xml b/pom.xml
index 8764e4f493d..adda4bb31f5 100644
--- a/pom.xml
+++ b/pom.xml
@@ -729,6 +729,22 @@
                     <skipTests>${skipUnitTests}</skipTests>
                 </configuration>
             </plugin>
+
+            <plugin>
+                <groupId>org.apache.maven.plugins</groupId>
+                <artifactId>maven-failsafe-plugin</artifactId>
+                <configuration>
+                    <groups>testcontainers</groups>
+                </configuration>
+                <executions>
+                    <execution>
+                        <goals>
+                            <goal>integration-test</goal>
+                            <goal>verify</goal>
+                        </goals>
+                    </execution>
+                </executions>
+            </plugin>
             <plugin>
                 <groupId>org.apache.maven.plugins</groupId>
                 <artifactId>maven-checkstyle-plugin</artifactId>

From c207b3baa95a01c9d67cefd1cacaf5784bd914ff Mon Sep 17 00:00:00 2001
From: Oliver Bertuch
Date: Mon, 22 May 2023 01:53:57 +0200
Subject: [PATCH 0208/1525] chore(build): update Keycloak Testcontainers version
---
 pom.xml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/pom.xml b/pom.xml
index adda4bb31f5..313f33b94b8 100644
--- a/pom.xml
+++ b/pom.xml
@@ -577,7 +577,7 @@
         <dependency>
             <groupId>com.github.dasniko</groupId>
             <artifactId>testcontainers-keycloak</artifactId>
-            <version>2.4.0</version>
+            <version>2.5.0</version>
             <scope>test</scope>
         </dependency>

From 7f8225f93c2353deb3f13e515ef1f43e9b0630c4 Mon Sep 17 00:00:00 2001
From: Oliver Bertuch
Date: Mon, 22 May 2023 01:56:13 +0200
Subject: [PATCH 0209/1525] chore(build,test): add temporary servlet dependency in test scope

This is necessary because the Jakarta EE 8.0.0 API package did not
properly ship the bundle files necessary for servlet execution.
Not including this testing dependency leads to very cryptic errors
à la MissingResourceException for Bundle with lang en-US.

This should be removed once we migrate to Jakarta EE 10.
---
 pom.xml | 12 ++++++++++++
 1 file changed, 12 insertions(+)

diff --git a/pom.xml b/pom.xml
index 313f33b94b8..e9a9b9dd611 100644
--- a/pom.xml
+++ b/pom.xml
@@ -580,6 +580,18 @@
             <version>2.5.0</version>
             <scope>test</scope>
         </dependency>
+
+        <dependency>
+            <groupId>jakarta.servlet</groupId>
+            <artifactId>jakarta.servlet-api</artifactId>
+            <version>4.0.4</version>
+            <scope>test</scope>
+        </dependency>
+
         <dependency>
             <groupId>org.mockito</groupId>
             <artifactId>mockito-core</artifactId>

From 16ab0e8a895517a2883ce1ef40c6c13e5497fe0c Mon Sep 17 00:00:00 2001
From: GPortas
Date: Mon, 22 May 2023 10:18:35 +0100
Subject: [PATCH 0210/1525] Fixed: getAnonymizedDraftVersion endpoint to be getPrivateUrlDatasetVersion
---
 .../harvard/iq/dataverse/api/Datasets.java    | 30 +++---
 .../iq/dataverse/dataset/DatasetUtil.java     | 16 ----
 .../iq/dataverse/util/json/JsonPrinter.java   | 63 ++++++++----
 .../harvard/iq/dataverse/api/DatasetsIT.java  | 96 +++++++++++++++----
 .../edu/harvard/iq/dataverse/api/UsersIT.java |  2 +-
 .../edu/harvard/iq/dataverse/api/UtilIT.java  | 11 ++-
 .../iq/dataverse/dataset/DatasetUtilTest.java | 31 ------
 .../dataverse/util/json/JsonPrinterTest.java  | 30 ++++++
 8 files changed, 187 insertions(+), 92 deletions(-)

diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java
index fe3fa13b8d7..7036fb5fccc 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java
@@ -10,6 +10,7 @@
 import edu.harvard.iq.dataverse.authorization.RoleAssignee;
 import edu.harvard.iq.dataverse.authorization.users.ApiToken;
 import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser;
+import edu.harvard.iq.dataverse.authorization.users.PrivateUrlUser;
 import edu.harvard.iq.dataverse.authorization.users.User;
 import edu.harvard.iq.dataverse.batch.jobs.importer.ImportMode;
 import edu.harvard.iq.dataverse.datacapturemodule.DataCaptureModuleUtil;
@@ -3867,19 +3868,26 @@ public Response getDatasetSummaryFieldNames() {
     }
 
     @GET
-    @AuthRequired
-    @Path("anonymizedDraftVersions/{privateUrlToken}")
-    public Response getAnonymizedDraftVersion(@Context ContainerRequestContext crc,
-                                              @PathParam("privateUrlToken") String privateUrlToken,
-                                              @QueryParam("anonymizedFieldValue") String anonymizedFieldValue) {
+    @Path("privateUrlDatasetVersion/{privateUrlToken}")
+    public Response getPrivateUrlDatasetVersion(@PathParam("privateUrlToken") String privateUrlToken,
+                                                @QueryParam("anonymizedFieldValue") String anonymizedFieldValue) {
+        PrivateUrlUser privateUrlUser = privateUrlService.getPrivateUrlUserFromToken(privateUrlToken);
+        boolean isAnonymizedAccess = privateUrlUser.hasAnonymizedAccess();
         String anonymizedFieldTypeNames = settingsSvc.getValueForKey(SettingsServiceBean.Key.AnonymizedFieldTypeNames);
-        if (anonymizedFieldTypeNames == null) {
+        if(isAnonymizedAccess && anonymizedFieldTypeNames == null) {
             throw new NotAcceptableException("Anonymized Access not enabled");
         }
-        return response(req -> {
-            DatasetVersion dsv = privateUrlService.getDraftDatasetVersionFromToken(privateUrlToken);
-            return (dsv == null || dsv.getId() == null) ?
notFound("Dataset version not found") - : ok(json(DatasetUtil.anonymizeDatasetVersion(dsv, anonymizedFieldTypeNames, anonymizedFieldValue))); - }, getRequestUser(crc)); + DatasetVersion dsv = privateUrlService.getDraftDatasetVersionFromToken(privateUrlToken); + if (dsv == null || dsv.getId() == null) { + return notFound("Dataset version not found"); + } + JsonObjectBuilder responseJson; + if (isAnonymizedAccess) { + List anonymizedFieldTypeNamesList = new ArrayList<>(Arrays.asList(anonymizedFieldTypeNames.split(",\\s"))); + responseJson = json(dsv, anonymizedFieldTypeNamesList, anonymizedFieldValue); + } else { + responseJson = json(dsv); + } + return ok(responseJson); } } diff --git a/src/main/java/edu/harvard/iq/dataverse/dataset/DatasetUtil.java b/src/main/java/edu/harvard/iq/dataverse/dataset/DatasetUtil.java index 9b51a194733..a75775810d9 100644 --- a/src/main/java/edu/harvard/iq/dataverse/dataset/DatasetUtil.java +++ b/src/main/java/edu/harvard/iq/dataverse/dataset/DatasetUtil.java @@ -3,7 +3,6 @@ import edu.harvard.iq.dataverse.DataFile; import edu.harvard.iq.dataverse.Dataset; import edu.harvard.iq.dataverse.DatasetField; -import edu.harvard.iq.dataverse.DatasetFieldValue; import edu.harvard.iq.dataverse.DatasetVersion; import edu.harvard.iq.dataverse.FileMetadata; import edu.harvard.iq.dataverse.TermsOfUseAndAccess; @@ -621,19 +620,4 @@ public static String getLocaleExternalStatus(String status) { } return localizedName; } - - public static DatasetVersion anonymizeDatasetVersion(DatasetVersion datasetVersion, String anonymizedFieldTypeNames, String anonymizedFieldValue) { - List anonymizedFieldTypeNamesList = new ArrayList<>(Arrays.asList(anonymizedFieldTypeNames.split(",\\s"))); - List datasetFields = datasetVersion.getDatasetFields(); - for (DatasetField datasetField : datasetFields) { - if (anonymizedFieldTypeNamesList.contains(datasetField.getDatasetFieldType().getName())) { - List datasetFieldValues = datasetField.getDatasetFieldValues(); - for (DatasetFieldValue datasetFieldValue : datasetFieldValues) { - datasetFieldValue.setValue((anonymizedFieldValue == null) ? 
BundleUtil.getStringFromBundle("dataset.anonymized.withheld") : anonymizedFieldValue); - } - datasetField.setDatasetFieldValues(datasetFieldValues); - } - } - return datasetVersion; - } } diff --git a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java index fd15bb118b0..700a54d5e13 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java @@ -40,6 +40,7 @@ import edu.harvard.iq.dataverse.harvest.client.HarvestingClient; import edu.harvard.iq.dataverse.privateurl.PrivateUrl; import edu.harvard.iq.dataverse.settings.SettingsServiceBean; +import edu.harvard.iq.dataverse.util.BundleUtil; import edu.harvard.iq.dataverse.util.DatasetFieldWalker; import static edu.harvard.iq.dataverse.util.json.NullSafeJsonBuilder.jsonObjectBuilder; @@ -357,6 +358,10 @@ public static JsonObjectBuilder json(FileDetailsHolder ds) { } public static JsonObjectBuilder json(DatasetVersion dsv) { + return json(dsv, null, null); + } + + public static JsonObjectBuilder json(DatasetVersion dsv, List anonymizedFieldTypeNamesList, String anonymizedFieldValue) { JsonObjectBuilder bld = jsonObjectBuilder() .add("id", dsv.getId()).add("datasetId", dsv.getDataset().getId()) .add("datasetPersistentId", dsv.getDataset().getGlobalId().asString()) @@ -368,7 +373,7 @@ public static JsonObjectBuilder json(DatasetVersion dsv) { .add("UNF", dsv.getUNF()).add("archiveTime", format(dsv.getArchiveTime())) .add("lastUpdateTime", format(dsv.getLastUpdateTime())).add("releaseTime", format(dsv.getReleaseTime())) .add("createTime", format(dsv.getCreateTime())); - License license = DatasetUtil.getLicense(dsv);; + License license = DatasetUtil.getLicense(dsv); if (license != null) { // Standard license bld.add("license", jsonObjectBuilder() @@ -394,14 +399,15 @@ public static JsonObjectBuilder json(DatasetVersion dsv) { .add("studyCompletion", dsv.getTermsOfUseAndAccess().getStudyCompletion()) .add("fileAccessRequest", dsv.getTermsOfUseAndAccess().isFileAccessRequest()); - bld.add("metadataBlocks", jsonByBlocks(dsv.getDatasetFields())); - + bld.add("metadataBlocks", (anonymizedFieldTypeNamesList != null) ? + jsonByBlocks(dsv.getDatasetFields(), anonymizedFieldTypeNamesList, anonymizedFieldValue) + : jsonByBlocks(dsv.getDatasetFields()) + ); bld.add("files", jsonFileMetadatas(dsv.getFileMetadatas())); return bld; } - - + public static JsonObjectBuilder jsonDataFileList(List dataFiles){ if (dataFiles==null){ @@ -474,11 +480,15 @@ public static JsonObjectBuilder json(DatasetDistributor dist) { } public static JsonObjectBuilder jsonByBlocks(List fields) { + return jsonByBlocks(fields, null, null); + } + + public static JsonObjectBuilder jsonByBlocks(List fields, List anonymizedFieldTypeNamesList, String anonymizedFieldValue) { JsonObjectBuilder blocksBld = jsonObjectBuilder(); for (Map.Entry> blockAndFields : DatasetField.groupByBlock(fields).entrySet()) { MetadataBlock block = blockAndFields.getKey(); - blocksBld.add(block.getName(), JsonPrinter.json(block, blockAndFields.getValue())); + blocksBld.add(block.getName(), JsonPrinter.json(block, blockAndFields.getValue(), anonymizedFieldTypeNamesList, anonymizedFieldValue)); } return blocksBld; } @@ -492,6 +502,10 @@ public static JsonObjectBuilder jsonByBlocks(List fields) { * @return JSON Object builder with the block and fields information. 
*/ public static JsonObjectBuilder json(MetadataBlock block, List fields) { + return json(block, fields, null, null); + } + + public static JsonObjectBuilder json(MetadataBlock block, List fields, List anonymizedFieldTypeNamesList, String anonymizedFieldValue) { JsonObjectBuilder blockBld = jsonObjectBuilder(); blockBld.add("displayName", block.getDisplayName()); @@ -499,7 +513,7 @@ public static JsonObjectBuilder json(MetadataBlock block, List fie final JsonArrayBuilder fieldsArray = Json.createArrayBuilder(); Map cvocMap = (datasetFieldService==null) ? new HashMap() :datasetFieldService.getCVocConf(true); - DatasetFieldWalker.walk(fields, settingsService, cvocMap, new DatasetFieldsToJson(fieldsArray)); + DatasetFieldWalker.walk(fields, settingsService, cvocMap, new DatasetFieldsToJson(fieldsArray, anonymizedFieldTypeNamesList, anonymizedFieldValue)); blockBld.add("fields", fieldsArray); return blockBld; @@ -734,12 +748,19 @@ private static class DatasetFieldsToJson implements DatasetFieldWalker.Listener Deque objectStack = new LinkedList<>(); Deque valueArrStack = new LinkedList<>(); - JsonObjectBuilder result = null; + List anonymizedFieldTypeNamesList = null; + String anonymizedFieldValue = null; DatasetFieldsToJson(JsonArrayBuilder result) { valueArrStack.push(result); } + DatasetFieldsToJson(JsonArrayBuilder result, List anonymizedFieldTypeNamesList, String anonymizedFieldValue) { + this(result); + this.anonymizedFieldTypeNamesList = anonymizedFieldTypeNamesList; + this.anonymizedFieldValue = anonymizedFieldValue; + } + @Override public void startField(DatasetField f) { objectStack.push(jsonObjectBuilder()); @@ -764,15 +785,19 @@ public void endField(DatasetField f) { JsonArray expandedValues = valueArrStack.pop().build(); JsonArray jsonValues = valueArrStack.pop().build(); if (!jsonValues.isEmpty()) { - jsonField.add("value", - f.getDatasetFieldType().isAllowMultiples() ? jsonValues - : jsonValues.get(0)); - if (!expandedValues.isEmpty()) { - jsonField.add("expandedvalue", - f.getDatasetFieldType().isAllowMultiples() ? expandedValues - : expandedValues.get(0)); + String datasetFieldName = f.getDatasetFieldType().getName(); + if (anonymizedFieldTypeNamesList != null && anonymizedFieldTypeNamesList.contains(datasetFieldName)) { + anonymizeField(jsonField); + } else { + jsonField.add("value", + f.getDatasetFieldType().isAllowMultiples() ? jsonValues + : jsonValues.get(0)); + if (!expandedValues.isEmpty()) { + jsonField.add("expandedvalue", + f.getDatasetFieldType().isAllowMultiples() ? expandedValues + : expandedValues.get(0)); + } } - valueArrStack.peek().add(jsonField); } } @@ -817,6 +842,12 @@ public void endCompoundValue(DatasetFieldCompoundValue dsfcv) { valueArrStack.peek().add(jsonField); } } + + private void anonymizeField(JsonObjectBuilder jsonField) { + jsonField.add("typeClass", "primitive"); + jsonField.add("value", (anonymizedFieldValue == null) ? 
BundleUtil.getStringFromBundle("dataset.anonymized.withheld") : anonymizedFieldValue); + jsonField.add("multiple", false); + } } public static JsonObjectBuilder json(AuthenticationProviderRow aRow) { diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java index 5ece0e0d018..9d86723bcd1 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java @@ -1,13 +1,16 @@ package edu.harvard.iq.dataverse.api; import com.jayway.restassured.RestAssured; + import static com.jayway.restassured.RestAssured.given; + import com.jayway.restassured.http.ContentType; import com.jayway.restassured.response.Response; + import java.util.logging.Logger; + import org.junit.BeforeClass; import org.junit.Test; -import org.mockito.Mockito; import org.skyscreamer.jsonassert.JSONAssert; import org.junit.Ignore; import com.jayway.restassured.path.json.JsonPath; @@ -15,6 +18,7 @@ import java.util.List; import java.util.Map; import javax.json.JsonObject; + import static javax.ws.rs.core.Response.Status.CREATED; import static javax.ws.rs.core.Response.Status.FORBIDDEN; import static javax.ws.rs.core.Response.Status.OK; @@ -22,21 +26,30 @@ import static javax.ws.rs.core.Response.Status.NOT_FOUND; import static javax.ws.rs.core.Response.Status.BAD_REQUEST; import static javax.ws.rs.core.Response.Status.METHOD_NOT_ALLOWED; +import static javax.ws.rs.core.Response.Status.CONFLICT; +import static javax.ws.rs.core.Response.Status.NO_CONTENT; + import edu.harvard.iq.dataverse.DataFile; -import edu.harvard.iq.dataverse.DataverseServiceBean; import static edu.harvard.iq.dataverse.api.UtilIT.API_TOKEN_HTTP_HEADER; + import edu.harvard.iq.dataverse.authorization.DataverseRole; import edu.harvard.iq.dataverse.authorization.users.PrivateUrlUser; import edu.harvard.iq.dataverse.settings.SettingsServiceBean; + import java.util.UUID; + import org.apache.commons.lang3.StringUtils; import org.apache.commons.lang3.exception.ExceptionUtils; import com.jayway.restassured.parsing.Parser; + import static com.jayway.restassured.path.json.JsonPath.with; + import com.jayway.restassured.path.xml.XmlPath; + import static edu.harvard.iq.dataverse.api.UtilIT.equalToCI; + import edu.harvard.iq.dataverse.authorization.groups.impl.builtin.AuthenticatedUsers; import edu.harvard.iq.dataverse.datavariable.VarGroup; import edu.harvard.iq.dataverse.datavariable.VariableMetadata; @@ -58,21 +71,22 @@ import javax.json.JsonArray; import javax.json.JsonObjectBuilder; import javax.ws.rs.core.Response.Status; -import static javax.ws.rs.core.Response.Status.CONFLICT; - -import static javax.ws.rs.core.Response.Status.NO_CONTENT; -import static javax.ws.rs.core.Response.Status.OK; import javax.xml.stream.XMLInputFactory; import javax.xml.stream.XMLStreamException; import javax.xml.stream.XMLStreamReader; + import static org.junit.Assert.assertEquals; + import org.hamcrest.CoreMatchers; + import static org.hamcrest.CoreMatchers.containsString; import static org.hamcrest.CoreMatchers.equalTo; import static org.hamcrest.CoreMatchers.startsWith; import static org.hamcrest.CoreMatchers.nullValue; + import org.junit.AfterClass; import org.junit.Assert; + import static org.junit.Assert.assertNotEquals; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertTrue; @@ -860,7 +874,7 @@ public void testPrivateUrl() { String username = UtilIT.getUsernameFromResponse(createUser); String apiToken = 
UtilIT.getApiTokenFromResponse(createUser); - Response failToCreateWhenDatasetIdNotFound = UtilIT.privateUrlCreate(Integer.MAX_VALUE, apiToken); + Response failToCreateWhenDatasetIdNotFound = UtilIT.privateUrlCreate(Integer.MAX_VALUE, apiToken, false); failToCreateWhenDatasetIdNotFound.prettyPrint(); assertEquals(NOT_FOUND.getStatusCode(), failToCreateWhenDatasetIdNotFound.getStatusCode()); @@ -890,7 +904,7 @@ public void testPrivateUrl() { grantRole.prettyPrint(); assertEquals(OK.getStatusCode(), grantRole.getStatusCode()); UtilIT.getRoleAssignmentsOnDataverse(dataverseAlias, apiToken).prettyPrint(); - Response contributorDoesNotHavePermissionToCreatePrivateUrl = UtilIT.privateUrlCreate(datasetId, contributorApiToken); + Response contributorDoesNotHavePermissionToCreatePrivateUrl = UtilIT.privateUrlCreate(datasetId, contributorApiToken, false); contributorDoesNotHavePermissionToCreatePrivateUrl.prettyPrint(); assertEquals(UNAUTHORIZED.getStatusCode(), contributorDoesNotHavePermissionToCreatePrivateUrl.getStatusCode()); @@ -918,7 +932,7 @@ public void testPrivateUrl() { pristine.prettyPrint(); assertEquals(NOT_FOUND.getStatusCode(), pristine.getStatusCode()); - Response createPrivateUrl = UtilIT.privateUrlCreate(datasetId, apiToken); + Response createPrivateUrl = UtilIT.privateUrlCreate(datasetId, apiToken, false); createPrivateUrl.prettyPrint(); assertEquals(OK.getStatusCode(), createPrivateUrl.getStatusCode()); @@ -1078,11 +1092,11 @@ public void testPrivateUrl() { shouldNoLongerExist.prettyPrint(); assertEquals(NOT_FOUND.getStatusCode(), shouldNoLongerExist.getStatusCode()); - Response createPrivateUrlUnauth = UtilIT.privateUrlCreate(datasetId, userWithNoRolesApiToken); + Response createPrivateUrlUnauth = UtilIT.privateUrlCreate(datasetId, userWithNoRolesApiToken, false); createPrivateUrlUnauth.prettyPrint(); assertEquals(UNAUTHORIZED.getStatusCode(), createPrivateUrlUnauth.getStatusCode()); - Response createPrivateUrlAgain = UtilIT.privateUrlCreate(datasetId, apiToken); + Response createPrivateUrlAgain = UtilIT.privateUrlCreate(datasetId, apiToken, false); createPrivateUrlAgain.prettyPrint(); assertEquals(OK.getStatusCode(), createPrivateUrlAgain.getStatusCode()); @@ -1098,11 +1112,11 @@ public void testPrivateUrl() { tryToDeleteAlreadyDeletedPrivateUrl.prettyPrint(); assertEquals(NOT_FOUND.getStatusCode(), tryToDeleteAlreadyDeletedPrivateUrl.getStatusCode()); - Response createPrivateUrlOnceAgain = UtilIT.privateUrlCreate(datasetId, apiToken); + Response createPrivateUrlOnceAgain = UtilIT.privateUrlCreate(datasetId, apiToken, false); createPrivateUrlOnceAgain.prettyPrint(); assertEquals(OK.getStatusCode(), createPrivateUrlOnceAgain.getStatusCode()); - Response tryToCreatePrivateUrlWhenExisting = UtilIT.privateUrlCreate(datasetId, apiToken); + Response tryToCreatePrivateUrlWhenExisting = UtilIT.privateUrlCreate(datasetId, apiToken, false); tryToCreatePrivateUrlWhenExisting.prettyPrint(); assertEquals(FORBIDDEN.getStatusCode(), tryToCreatePrivateUrlWhenExisting.getStatusCode()); @@ -1121,7 +1135,7 @@ public void testPrivateUrl() { List noAssignmentsForPrivateUrlUser = with(publishingShouldHaveRemovedRoleAssignmentForPrivateUrlUser.body().asString()).param("member", "member").getJsonObject("data.findAll { data -> data._roleAlias == member }"); assertEquals(0, noAssignmentsForPrivateUrlUser.size()); - Response tryToCreatePrivateUrlToPublishedVersion = UtilIT.privateUrlCreate(datasetId, apiToken); + Response tryToCreatePrivateUrlToPublishedVersion = UtilIT.privateUrlCreate(datasetId, 
apiToken, false); tryToCreatePrivateUrlToPublishedVersion.prettyPrint(); assertEquals(FORBIDDEN.getStatusCode(), tryToCreatePrivateUrlToPublishedVersion.getStatusCode()); @@ -1130,7 +1144,7 @@ public void testPrivateUrl() { updatedMetadataResponse.prettyPrint(); assertEquals(OK.getStatusCode(), updatedMetadataResponse.getStatusCode()); - Response createPrivateUrlForPostVersionOneDraft = UtilIT.privateUrlCreate(datasetId, apiToken); + Response createPrivateUrlForPostVersionOneDraft = UtilIT.privateUrlCreate(datasetId, apiToken, false); createPrivateUrlForPostVersionOneDraft.prettyPrint(); assertEquals(OK.getStatusCode(), createPrivateUrlForPostVersionOneDraft.getStatusCode()); @@ -1157,7 +1171,7 @@ public void testPrivateUrl() { * a dataset is destroy. Still, we'll keep this test in here in case we * switch Private URL back to being its own table in the future. */ - Response createPrivateUrlToMakeSureItIsDeletedWithDestructionOfDataset = UtilIT.privateUrlCreate(datasetId, apiToken); + Response createPrivateUrlToMakeSureItIsDeletedWithDestructionOfDataset = UtilIT.privateUrlCreate(datasetId, apiToken, false); createPrivateUrlToMakeSureItIsDeletedWithDestructionOfDataset.prettyPrint(); assertEquals(OK.getStatusCode(), createPrivateUrlToMakeSureItIsDeletedWithDestructionOfDataset.getStatusCode()); @@ -3062,4 +3076,54 @@ public void testGetDatasetSummaryFieldNames() { } assertFalse(actualSummaryFields.isEmpty()); } + + + @Test + public void getPrivateUrlDatasetVersion() { + Response createUser = UtilIT.createRandomUser(); + String apiToken = UtilIT.getApiTokenFromResponse(createUser); + + Response createDataverseResponse = UtilIT.createRandomDataverse(apiToken); + String dataverseAlias = UtilIT.getAliasFromResponse(createDataverseResponse); + + // Non-anonymized test + + Response createDatasetResponse = UtilIT.createRandomDatasetViaNativeApi(dataverseAlias, apiToken); + Integer datasetId = JsonPath.from(createDatasetResponse.body().asString()).getInt("data.id"); + + UtilIT.privateUrlCreate(datasetId, apiToken, false); + Response privateUrlGet = UtilIT.privateUrlGet(datasetId, apiToken); + String tokenForPrivateUrlUser = JsonPath.from(privateUrlGet.body().asString()).getString("data.token"); + + // We verify that the response contains the dataset associated to the private URL token + Response getPrivateUrlDatasetVersionResponse = UtilIT.getPrivateUrlDatasetVersion(tokenForPrivateUrlUser, null); + getPrivateUrlDatasetVersionResponse.then().assertThat() + .body("data.datasetId", equalTo(datasetId)) + .statusCode(OK.getStatusCode()); + + // Test anonymized + + Response setAnonymizedFieldsSettingResponse = UtilIT.setSetting(SettingsServiceBean.Key.AnonymizedFieldTypeNames, "author"); + setAnonymizedFieldsSettingResponse.then().assertThat().statusCode(OK.getStatusCode()); + + createDatasetResponse = UtilIT.createRandomDatasetViaNativeApi(dataverseAlias, apiToken); + datasetId = JsonPath.from(createDatasetResponse.body().asString()).getInt("data.id"); + + UtilIT.privateUrlCreate(datasetId, apiToken, true); + privateUrlGet = UtilIT.privateUrlGet(datasetId, apiToken); + tokenForPrivateUrlUser = JsonPath.from(privateUrlGet.body().asString()).getString("data.token"); + + String testAnonymizedValue = "testAnonymizedValue"; + Response getPrivateUrlDatasetVersionAnonymizedResponse = UtilIT.getPrivateUrlDatasetVersion(tokenForPrivateUrlUser, testAnonymizedValue); + + // We verify that the response is anonymized for the author field + getPrivateUrlDatasetVersionAnonymizedResponse.then().assertThat() + 
.body("data.datasetId", equalTo(datasetId)) + .body("data.metadataBlocks.citation.fields[1].value", equalTo(testAnonymizedValue)) + .body("data.metadataBlocks.citation.fields[1].typeClass", equalTo("primitive")) + .body("data.metadataBlocks.citation.fields[1].multiple", equalTo(false)) + .statusCode(OK.getStatusCode()); + + UtilIT.deleteSetting(SettingsServiceBean.Key.AnonymizedFieldTypeNames); + } } diff --git a/src/test/java/edu/harvard/iq/dataverse/api/UsersIT.java b/src/test/java/edu/harvard/iq/dataverse/api/UsersIT.java index 83dfc5fd889..07e8ef41d92 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/UsersIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/UsersIT.java @@ -404,7 +404,7 @@ public void testAPITokenEndpoints() { createDatasetResponse.prettyPrint(); Integer datasetId = JsonPath.from(createDatasetResponse.body().asString()).getInt("data.id"); - Response createPrivateUrl = UtilIT.privateUrlCreate(datasetId, apiToken); + Response createPrivateUrl = UtilIT.privateUrlCreate(datasetId, apiToken, false); createPrivateUrl.prettyPrint(); assertEquals(OK.getStatusCode(), createPrivateUrl.getStatusCode()); diff --git a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java index 7c45155a672..ceb2a386f92 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java @@ -1693,9 +1693,10 @@ static Response privateUrlGet(Integer datasetId, String apiToken) { return response; } - static Response privateUrlCreate(Integer datasetId, String apiToken) { + static Response privateUrlCreate(Integer datasetId, String apiToken, boolean anonymizedAccess) { Response response = given() .header(API_TOKEN_HTTP_HEADER, apiToken) + .queryParam("anonymizedAccess", anonymizedAccess) .post("/api/datasets/" + datasetId + "/privateUrl"); return response; } @@ -3197,4 +3198,12 @@ static Response getDatasetSummaryFieldNames() { .get("/api/datasets/summaryFieldNames"); return response; } + + static Response getPrivateUrlDatasetVersion(String privateUrlToken, String anonymizedFieldValue) { + Response response = given() + .contentType("application/json") + .queryParam("anonymizedFieldValue", anonymizedFieldValue) + .get("/api/datasets/privateUrlDatasetVersion/" + privateUrlToken); + return response; + } } diff --git a/src/test/java/edu/harvard/iq/dataverse/dataset/DatasetUtilTest.java b/src/test/java/edu/harvard/iq/dataverse/dataset/DatasetUtilTest.java index 5e59e044095..46bce999c60 100644 --- a/src/test/java/edu/harvard/iq/dataverse/dataset/DatasetUtilTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/dataset/DatasetUtilTest.java @@ -4,7 +4,6 @@ import edu.harvard.iq.dataverse.Dataset; import edu.harvard.iq.dataverse.DatasetField; import edu.harvard.iq.dataverse.DatasetFieldType; -import edu.harvard.iq.dataverse.DatasetFieldValue; import edu.harvard.iq.dataverse.DatasetVersion; import edu.harvard.iq.dataverse.FileMetadata; import edu.harvard.iq.dataverse.DatasetFieldType.FieldType; @@ -177,34 +176,4 @@ public void testGetDatasetSummaryFieldNames_notEmptyCustomFields() { assertArrayEquals(expected, actual); } - - @Test - public void testAnonymizeDatasetVersion() { - DatasetVersion testDatasetVersion = new DatasetVersion(); - - List testDatasetFields = new ArrayList<>(); - String[] fieldNames = {"author", "subject", "keyword"}; - for (String fieldName : fieldNames) { - DatasetField datasetField = DatasetField.createNewEmptyDatasetField(new DatasetFieldType(fieldName, 
FieldType.TEXT, false), testDatasetVersion); - DatasetFieldValue datasetFieldValue = new DatasetFieldValue(datasetField, "testValue"); - datasetField.setDatasetFieldValues(List.of(datasetFieldValue)); - testDatasetFields.add(datasetField); - } - testDatasetVersion.setDatasetFields(testDatasetFields); - - String testAnonymizedFieldNames = "subject, keyword"; - String testAnonymizedFieldValue = "testValueToAnonymize"; - DatasetVersion actualVersion = DatasetUtil.anonymizeDatasetVersion(testDatasetVersion, testAnonymizedFieldNames, testAnonymizedFieldValue); - - // We check that the fields to be anonymized are successfully anonymized and others remain as originally - List actualVersionDatasetFields = actualVersion.getDatasetFields(); - for (DatasetField datasetField : actualVersionDatasetFields) { - String datasetFieldValue = datasetField.getDatasetFieldValues().get(0).getValue(); - if (testAnonymizedFieldNames.contains(datasetField.getDatasetFieldType().getName())) { - assertEquals(testAnonymizedFieldValue, datasetFieldValue); - } else { - assertNotEquals(testAnonymizedFieldValue, datasetFieldValue); - } - } - } } diff --git a/src/test/java/edu/harvard/iq/dataverse/util/json/JsonPrinterTest.java b/src/test/java/edu/harvard/iq/dataverse/util/json/JsonPrinterTest.java index cbefd3be0ad..741426558ab 100644 --- a/src/test/java/edu/harvard/iq/dataverse/util/json/JsonPrinterTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/util/json/JsonPrinterTest.java @@ -28,6 +28,7 @@ import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNotNull; import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.assertFalse; public class JsonPrinterTest { @@ -319,4 +320,33 @@ public void testEnum() throws JsonParseException { assertTrue(typesSet.contains("ASSIGNROLE")); } + @Test + public void testMetadataBlockAnonymized() { + MetadataBlock block = new MetadataBlock(); + block.setName("citation"); + List fields = new ArrayList<>(); + DatasetField datasetAuthorField = new DatasetField(); + DatasetFieldType datasetAuthorFieldType = datasetFieldTypeSvc.findByName("author"); + datasetAuthorFieldType.setMetadataBlock(block); + datasetAuthorField.setDatasetFieldType(datasetAuthorFieldType); + List compoundValues = new LinkedList<>(); + DatasetFieldCompoundValue compoundValue = new DatasetFieldCompoundValue(); + compoundValue.setParentDatasetField(datasetAuthorField); + compoundValue.setChildDatasetFields(Arrays.asList( + constructPrimitive("authorName", "Test Author"), + constructPrimitive("authorAffiliation", "Test Affiliation") + )); + compoundValues.add(compoundValue); + datasetAuthorField.setDatasetFieldCompoundValues(compoundValues); + fields.add(datasetAuthorField); + + String testAnonymizedFieldValue = "test"; + JsonObject actualJsonObject = JsonPrinter.json(block, fields, List.of("author"), testAnonymizedFieldValue).build(); + + assertNotNull(actualJsonObject); + JsonObject actualAuthorJsonObject = actualJsonObject.getJsonArray("fields").getJsonObject(0); + assertEquals(testAnonymizedFieldValue, actualAuthorJsonObject.getString("value")); + assertEquals("primitive", actualAuthorJsonObject.getString("typeClass")); + assertFalse(actualAuthorJsonObject.getBoolean("multiple")); + } } From ac6354645c2bc8557e4cab19d2d28304605e634b Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Mon, 22 May 2023 16:09:29 +0200 Subject: [PATCH 0211/1525] fix(test,oidc): replace Keycloak Demo/Dev realm The realm used before (created with Keycloak 16) was for 
some reason not compatible with Keycloak 20+. A new "Test" realm was created with more users and working with Keycloak 20 and 21. All files to run Keycloak have been adapted in version and realm import file. --- conf/keycloak/docker-compose.yml | 12 +- .../keycloak/oidc-keycloak-auth-provider.json | 2 +- conf/keycloak/oidc-realm.json | 2108 ----------------- conf/keycloak/run-keycloak.sh | 4 +- conf/keycloak/test-realm.json | 1939 +++++++++++++++ docker-compose-dev.yml | 6 +- 6 files changed, 1952 insertions(+), 2119 deletions(-) delete mode 100644 conf/keycloak/oidc-realm.json create mode 100644 conf/keycloak/test-realm.json diff --git a/conf/keycloak/docker-compose.yml b/conf/keycloak/docker-compose.yml index 2776f6572df..12b2382bd3d 100644 --- a/conf/keycloak/docker-compose.yml +++ b/conf/keycloak/docker-compose.yml @@ -3,13 +3,15 @@ version: "3.9" services: keycloak: - image: 'jboss/keycloak:16.1.1' + image: 'quay.io/keycloak/keycloak:21.0' + command: + - "start-dev" + - "--import-realm" environment: - - KEYCLOAK_USER=kcadmin - - KEYCLOAK_PASSWORD=kcpassword - - KEYCLOAK_IMPORT=/tmp/oidc-realm.json + - KEYCLOAK_ADMIN=kcadmin + - KEYCLOAK_ADMIN_PASSWORD=kcpassword - KEYCLOAK_LOGLEVEL=DEBUG ports: - "8090:8080" volumes: - - './oidc-realm.json:/tmp/oidc-realm.json' + - './test-realm.json:/opt/keycloak/data/import/test-realm.json' diff --git a/conf/keycloak/oidc-keycloak-auth-provider.json b/conf/keycloak/oidc-keycloak-auth-provider.json index 7d09fe5f36e..7e01bd4c325 100644 --- a/conf/keycloak/oidc-keycloak-auth-provider.json +++ b/conf/keycloak/oidc-keycloak-auth-provider.json @@ -3,6 +3,6 @@ "factoryAlias": "oidc", "title": "OIDC-Keycloak", "subtitle": "OIDC-Keycloak", - "factoryData": "type: oidc | issuer: http://keycloak.mydomain.com:8090/realms/oidc-realm | clientId: oidc-client | clientSecret: ss6gE8mODCDfqesQaSG3gwUwZqZt547E", + "factoryData": "type: oidc | issuer: http://keycloak.mydomain.com:8090/realms/test | clientId: test | clientSecret: 94XHrfNRwXsjqTqApRrwWmhDLDHpIYV8", "enabled": true } diff --git a/conf/keycloak/oidc-realm.json b/conf/keycloak/oidc-realm.json deleted file mode 100644 index 1b77f2b4384..00000000000 --- a/conf/keycloak/oidc-realm.json +++ /dev/null @@ -1,2108 +0,0 @@ -{ - "id": "oidc-realm", - "realm": "oidc-realm", - "notBefore": 0, - "defaultSignatureAlgorithm": "RS256", - "revokeRefreshToken": false, - "refreshTokenMaxReuse": 0, - "accessTokenLifespan": 300, - "accessTokenLifespanForImplicitFlow": 900, - "ssoSessionIdleTimeout": 1800, - "ssoSessionMaxLifespan": 36000, - "ssoSessionIdleTimeoutRememberMe": 0, - "ssoSessionMaxLifespanRememberMe": 0, - "offlineSessionIdleTimeout": 2592000, - "offlineSessionMaxLifespanEnabled": false, - "offlineSessionMaxLifespan": 5184000, - "clientSessionIdleTimeout": 0, - "clientSessionMaxLifespan": 0, - "clientOfflineSessionIdleTimeout": 0, - "clientOfflineSessionMaxLifespan": 0, - "accessCodeLifespan": 60, - "accessCodeLifespanUserAction": 300, - "accessCodeLifespanLogin": 1800, - "actionTokenGeneratedByAdminLifespan": 43200, - "actionTokenGeneratedByUserLifespan": 300, - "oauth2DeviceCodeLifespan": 600, - "oauth2DevicePollingInterval": 5, - "enabled": true, - "sslRequired": "external", - "registrationAllowed": false, - "registrationEmailAsUsername": false, - "rememberMe": false, - "verifyEmail": false, - "loginWithEmailAllowed": true, - "duplicateEmailsAllowed": false, - "resetPasswordAllowed": false, - "editUsernameAllowed": false, - "bruteForceProtected": false, - "permanentLockout": false, - "maxFailureWaitSeconds": 
900, - "minimumQuickLoginWaitSeconds": 60, - "waitIncrementSeconds": 60, - "quickLoginCheckMilliSeconds": 1000, - "maxDeltaTimeSeconds": 43200, - "failureFactor": 30, - "roles": { - "realm": [ - { - "id": "13d76240-fcf8-4361-9dbf-de268717cfb2", - "name": "uma_authorization", - "description": "${role_uma_authorization}", - "composite": false, - "clientRole": false, - "containerId": "oidc-realm", - "attributes": {} - }, - { - "id": "88b414c4-3516-4486-8f8b-a811ed0e0ce5", - "name": "default-roles-oidc-realm", - "description": "${role_default-roles}", - "composite": true, - "composites": { - "realm": [ - "offline_access", - "uma_authorization" - ] - }, - "clientRole": false, - "containerId": "oidc-realm", - "attributes": {} - }, - { - "id": "b907fd4e-0e54-461c-9411-3f736eef7d2f", - "name": "offline_access", - "description": "${role_offline-access}", - "composite": false, - "clientRole": false, - "containerId": "oidc-realm", - "attributes": {} - } - ], - "client": { - "realm-management": [ - { - "id": "39342ea9-0b4e-4841-8996-433759e9297f", - "name": "create-client", - "description": "${role_create-client}", - "composite": false, - "clientRole": true, - "containerId": "43ffb712-f233-48e2-ae79-d6993bac34a5", - "attributes": {} - }, - { - "id": "f8680034-617d-45d3-9801-7bf0d704c549", - "name": "manage-users", - "description": "${role_manage-users}", - "composite": false, - "clientRole": true, - "containerId": "43ffb712-f233-48e2-ae79-d6993bac34a5", - "attributes": {} - }, - { - "id": "b08e4cc3-71e2-4395-b66b-fb1277b48b88", - "name": "manage-realm", - "description": "${role_manage-realm}", - "composite": false, - "clientRole": true, - "containerId": "43ffb712-f233-48e2-ae79-d6993bac34a5", - "attributes": {} - }, - { - "id": "c15dc407-d012-43af-9a21-a2923e1d7b74", - "name": "manage-events", - "description": "${role_manage-events}", - "composite": false, - "clientRole": true, - "containerId": "43ffb712-f233-48e2-ae79-d6993bac34a5", - "attributes": {} - }, - { - "id": "66c07cb7-42cd-4155-8485-6cc7bd37cba9", - "name": "view-realm", - "description": "${role_view-realm}", - "composite": false, - "clientRole": true, - "containerId": "43ffb712-f233-48e2-ae79-d6993bac34a5", - "attributes": {} - }, - { - "id": "0419515f-4ab8-43ca-ac69-e842195813c0", - "name": "view-events", - "description": "${role_view-events}", - "composite": false, - "clientRole": true, - "containerId": "43ffb712-f233-48e2-ae79-d6993bac34a5", - "attributes": {} - }, - { - "id": "aa553d5a-b2dc-4f81-979a-2af0a019fee0", - "name": "impersonation", - "description": "${role_impersonation}", - "composite": false, - "clientRole": true, - "containerId": "43ffb712-f233-48e2-ae79-d6993bac34a5", - "attributes": {} - }, - { - "id": "9567e1e9-b755-43a8-93ed-d5929391316f", - "name": "manage-clients", - "description": "${role_manage-clients}", - "composite": false, - "clientRole": true, - "containerId": "43ffb712-f233-48e2-ae79-d6993bac34a5", - "attributes": {} - }, - { - "id": "e3dab69f-7323-4aad-bf98-8b7697f36d57", - "name": "query-users", - "description": "${role_query-users}", - "composite": false, - "clientRole": true, - "containerId": "43ffb712-f233-48e2-ae79-d6993bac34a5", - "attributes": {} - }, - { - "id": "ee8a4855-d0d5-4261-bdba-b419d304a824", - "name": "query-groups", - "description": "${role_query-groups}", - "composite": false, - "clientRole": true, - "containerId": "43ffb712-f233-48e2-ae79-d6993bac34a5", - "attributes": {} - }, - { - "id": "4f251212-e922-4ac0-9cce-3ada607648d2", - "name": "view-identity-providers", - "description": 
"${role_view-identity-providers}", - "composite": false, - "clientRole": true, - "containerId": "43ffb712-f233-48e2-ae79-d6993bac34a5", - "attributes": {} - }, - { - "id": "34e1dc59-a975-424f-887b-52465e184a4b", - "name": "realm-admin", - "description": "${role_realm-admin}", - "composite": true, - "composites": { - "client": { - "realm-management": [ - "create-client", - "manage-users", - "manage-realm", - "manage-events", - "view-realm", - "view-events", - "impersonation", - "manage-clients", - "query-users", - "view-identity-providers", - "query-groups", - "view-clients", - "view-users", - "manage-authorization", - "manage-identity-providers", - "query-realms", - "query-clients", - "view-authorization" - ] - } - }, - "clientRole": true, - "containerId": "43ffb712-f233-48e2-ae79-d6993bac34a5", - "attributes": {} - }, - { - "id": "d35aca04-0182-40d3-96b8-1ce5cc118729", - "name": "view-clients", - "description": "${role_view-clients}", - "composite": true, - "composites": { - "client": { - "realm-management": [ - "query-clients" - ] - } - }, - "clientRole": true, - "containerId": "43ffb712-f233-48e2-ae79-d6993bac34a5", - "attributes": {} - }, - { - "id": "7d3b28d5-471a-4b2b-bc80-56d4ff80fd28", - "name": "view-users", - "description": "${role_view-users}", - "composite": true, - "composites": { - "client": { - "realm-management": [ - "query-users", - "query-groups" - ] - } - }, - "clientRole": true, - "containerId": "43ffb712-f233-48e2-ae79-d6993bac34a5", - "attributes": {} - }, - { - "id": "651059eb-fc1a-4f8d-9ced-ed28b0a2f965", - "name": "manage-authorization", - "description": "${role_manage-authorization}", - "composite": false, - "clientRole": true, - "containerId": "43ffb712-f233-48e2-ae79-d6993bac34a5", - "attributes": {} - }, - { - "id": "73f447e9-def8-4214-8516-56571f2c6f65", - "name": "manage-identity-providers", - "description": "${role_manage-identity-providers}", - "composite": false, - "clientRole": true, - "containerId": "43ffb712-f233-48e2-ae79-d6993bac34a5", - "attributes": {} - }, - { - "id": "1b5f7c39-885e-4246-8cf5-25769544fc3d", - "name": "query-realms", - "description": "${role_query-realms}", - "composite": false, - "clientRole": true, - "containerId": "43ffb712-f233-48e2-ae79-d6993bac34a5", - "attributes": {} - }, - { - "id": "350da4c1-69d4-4557-a9a8-8ba760db0225", - "name": "query-clients", - "description": "${role_query-clients}", - "composite": false, - "clientRole": true, - "containerId": "43ffb712-f233-48e2-ae79-d6993bac34a5", - "attributes": {} - }, - { - "id": "43d51082-6922-4765-8022-529d91a4603f", - "name": "view-authorization", - "description": "${role_view-authorization}", - "composite": false, - "clientRole": true, - "containerId": "43ffb712-f233-48e2-ae79-d6993bac34a5", - "attributes": {} - } - ], - "security-admin-console": [], - "admin-cli": [], - "account-console": [], - "broker": [], - "oidc-client": [], - "account": [ - { - "id": "a163535c-71de-4b2d-9530-26b25eeb1c1e", - "name": "delete-account", - "description": "${role_delete-account}", - "composite": false, - "clientRole": true, - "containerId": "aed2e103-ee29-4d5c-a34e-1b8c65b7d537", - "attributes": {} - }, - { - "id": "851c6a9f-bce7-4c70-be82-084c25d61b25", - "name": "manage-account", - "composite": false, - "clientRole": true, - "containerId": "aed2e103-ee29-4d5c-a34e-1b8c65b7d537", - "attributes": {} - } - ] - } - }, - "groups": [], - "defaultRole": { - "id": "88b414c4-3516-4486-8f8b-a811ed0e0ce5", - "name": "default-roles-oidc-realm", - "description": "${role_default-roles}", - "composite": 
true, - "clientRole": false, - "containerId": "oidc-realm" - }, - "requiredCredentials": [ - "password" - ], - "otpPolicyType": "totp", - "otpPolicyAlgorithm": "HmacSHA1", - "otpPolicyInitialCounter": 0, - "otpPolicyDigits": 6, - "otpPolicyLookAheadWindow": 1, - "otpPolicyPeriod": 30, - "otpSupportedApplications": [ - "FreeOTP", - "Google Authenticator" - ], - "webAuthnPolicyRpEntityName": "keycloak", - "webAuthnPolicySignatureAlgorithms": [ - "ES256" - ], - "webAuthnPolicyRpId": "", - "webAuthnPolicyAttestationConveyancePreference": "not specified", - "webAuthnPolicyAuthenticatorAttachment": "not specified", - "webAuthnPolicyRequireResidentKey": "not specified", - "webAuthnPolicyUserVerificationRequirement": "not specified", - "webAuthnPolicyCreateTimeout": 0, - "webAuthnPolicyAvoidSameAuthenticatorRegister": false, - "webAuthnPolicyAcceptableAaguids": [], - "webAuthnPolicyPasswordlessRpEntityName": "keycloak", - "webAuthnPolicyPasswordlessSignatureAlgorithms": [ - "ES256" - ], - "webAuthnPolicyPasswordlessRpId": "", - "webAuthnPolicyPasswordlessAttestationConveyancePreference": "not specified", - "webAuthnPolicyPasswordlessAuthenticatorAttachment": "not specified", - "webAuthnPolicyPasswordlessRequireResidentKey": "not specified", - "webAuthnPolicyPasswordlessUserVerificationRequirement": "not specified", - "webAuthnPolicyPasswordlessCreateTimeout": 0, - "webAuthnPolicyPasswordlessAvoidSameAuthenticatorRegister": false, - "webAuthnPolicyPasswordlessAcceptableAaguids": [], - "users": [ - { - "username": "kcuser", - "enabled": true, - "totp": false, - "emailVerified": true, - "firstName": "Test", - "lastName": "Test", - "email": "test@test.com", - "credentials": [ - { - "type": "password", - "value": "kcpassword" - } - ] - } - ], - "scopeMappings": [ - { - "clientScope": "offline_access", - "roles": [ - "offline_access" - ] - } - ], - "clientScopeMappings": { - "account": [ - { - "client": "account-console", - "roles": [ - "manage-account" - ] - } - ] - }, - "clients": [ - { - "id": "aed2e103-ee29-4d5c-a34e-1b8c65b7d537", - "clientId": "account", - "name": "${client_account}", - "rootUrl": "${authBaseUrl}", - "baseUrl": "/realms/oidc-realm/account/", - "surrogateAuthRequired": false, - "enabled": true, - "alwaysDisplayInConsole": false, - "clientAuthenticatorType": "client-secret", - "redirectUris": [ - "/realms/oidc-realm/account/*" - ], - "webOrigins": [], - "notBefore": 0, - "bearerOnly": false, - "consentRequired": false, - "standardFlowEnabled": true, - "implicitFlowEnabled": false, - "directAccessGrantsEnabled": false, - "serviceAccountsEnabled": false, - "publicClient": true, - "frontchannelLogout": false, - "protocol": "openid-connect", - "attributes": {}, - "authenticationFlowBindingOverrides": {}, - "fullScopeAllowed": false, - "nodeReRegistrationTimeout": 0, - "defaultClientScopes": [ - "web-origins", - "roles", - "profile", - "email" - ], - "optionalClientScopes": [ - "address", - "phone", - "offline_access", - "microprofile-jwt" - ] - }, - { - "id": "1e821c0e-f6b9-4324-9b23-e82b5431fb72", - "clientId": "account-console", - "name": "${client_account-console}", - "rootUrl": "${authBaseUrl}", - "baseUrl": "/realms/oidc-realm/account/", - "surrogateAuthRequired": false, - "enabled": true, - "alwaysDisplayInConsole": false, - "clientAuthenticatorType": "client-secret", - "redirectUris": [ - "/realms/oidc-realm/account/*" - ], - "webOrigins": [], - "notBefore": 0, - "bearerOnly": false, - "consentRequired": false, - "standardFlowEnabled": true, - "implicitFlowEnabled": false, - 
"directAccessGrantsEnabled": false, - "serviceAccountsEnabled": false, - "publicClient": true, - "frontchannelLogout": false, - "protocol": "openid-connect", - "attributes": { - "pkce.code.challenge.method": "S256" - }, - "authenticationFlowBindingOverrides": {}, - "fullScopeAllowed": false, - "nodeReRegistrationTimeout": 0, - "protocolMappers": [ - { - "id": "397616ab-4124-4a13-92b6-317423e818a3", - "name": "audience resolve", - "protocol": "openid-connect", - "protocolMapper": "oidc-audience-resolve-mapper", - "consentRequired": false, - "config": {} - } - ], - "defaultClientScopes": [ - "web-origins", - "roles", - "profile", - "email" - ], - "optionalClientScopes": [ - "address", - "phone", - "offline_access", - "microprofile-jwt" - ] - }, - { - "id": "dddcc3e0-d742-422b-8b5f-84a292ea9d66", - "clientId": "admin-cli", - "name": "${client_admin-cli}", - "surrogateAuthRequired": false, - "enabled": true, - "alwaysDisplayInConsole": false, - "clientAuthenticatorType": "client-secret", - "redirectUris": [], - "webOrigins": [], - "notBefore": 0, - "bearerOnly": false, - "consentRequired": false, - "standardFlowEnabled": false, - "implicitFlowEnabled": false, - "directAccessGrantsEnabled": true, - "serviceAccountsEnabled": false, - "publicClient": true, - "frontchannelLogout": false, - "protocol": "openid-connect", - "attributes": {}, - "authenticationFlowBindingOverrides": {}, - "fullScopeAllowed": false, - "nodeReRegistrationTimeout": 0, - "defaultClientScopes": [ - "web-origins", - "roles", - "profile", - "email" - ], - "optionalClientScopes": [ - "address", - "phone", - "offline_access", - "microprofile-jwt" - ] - }, - { - "id": "df6f6cd0-a046-492f-84ac-b4fe31909be4", - "clientId": "broker", - "name": "${client_broker}", - "surrogateAuthRequired": false, - "enabled": true, - "alwaysDisplayInConsole": false, - "clientAuthenticatorType": "client-secret", - "redirectUris": [], - "webOrigins": [], - "notBefore": 0, - "bearerOnly": true, - "consentRequired": false, - "standardFlowEnabled": true, - "implicitFlowEnabled": false, - "directAccessGrantsEnabled": false, - "serviceAccountsEnabled": false, - "publicClient": false, - "frontchannelLogout": false, - "protocol": "openid-connect", - "attributes": {}, - "authenticationFlowBindingOverrides": {}, - "fullScopeAllowed": false, - "nodeReRegistrationTimeout": 0, - "defaultClientScopes": [ - "web-origins", - "roles", - "profile", - "email" - ], - "optionalClientScopes": [ - "address", - "phone", - "offline_access", - "microprofile-jwt" - ] - }, - { - "id": "c0af31b9-21aa-4e70-baf3-8d68850c4081", - "clientId": "oidc-client", - "surrogateAuthRequired": false, - "enabled": true, - "alwaysDisplayInConsole": false, - "clientAuthenticatorType": "client-secret", - "secret": "ss6gE8mODCDfqesQaSG3gwUwZqZt547E", - "redirectUris": [ - "*" - ], - "webOrigins": [ - "+" - ], - "notBefore": 0, - "bearerOnly": false, - "consentRequired": false, - "standardFlowEnabled": true, - "implicitFlowEnabled": false, - "directAccessGrantsEnabled": false, - "serviceAccountsEnabled": false, - "publicClient": false, - "frontchannelLogout": false, - "protocol": "openid-connect", - "attributes": { - "saml.force.post.binding": "false", - "saml.multivalued.roles": "false", - "oauth2.device.authorization.grant.enabled": "false", - "use.jwks.url": "true", - "backchannel.logout.revoke.offline.tokens": "false", - "saml.server.signature.keyinfo.ext": "false", - "use.refresh.tokens": "true", - "jwt.credential.certificate": 
"MIICpTCCAY0CBgGE8V6o6TANBgkqhkiG9w0BAQsFADAWMRQwEgYDVQQDDAtvaWRjLWNsaWVudDAeFw0yMjEyMDgxMDUyMDNaFw0zMjEyMDgxMDUzNDNaMBYxFDASBgNVBAMMC29pZGMtY2xpZW50MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEArUffTl+jXWzyY3T4VVtkiGyNnY+RgyAXUzz+dxT7wUQaYSiNPvmaxnio555pWjR403SRUjVxM8eJYgHK9s43qQWdheXBIHyLKaQfjVsTtSmHgFtPmjk+kweQs6fxUi5CNvtx4RTCaOK5wV8q5q1X7mb8cZ5+gLSx1f/pHtayFXMT75nV04aZKWgPztPz8w+QXUx9cuFY4OIiTdRbdyfr1oOiDtMbxxA22tggB/HSMVkSckT3LSPj7fJKJMPFYi/g1AXxGipX/q8XkmOBrvNePCpH0F/IZbC1vXEsDC6urfoijOdiZgPMobuADmWHPiw2zgCN8qa6QuLFaI+JduXT9QIDAQABMA0GCSqGSIb3DQEBCwUAA4IBAQCEOYRHkH8DnBucb+uN5c9U/fZY+mpglxzZvby7dGBXfVwLN+eP1kGcQPaFi+nshk7FgF4mR5/cmuAPZt+YBbgP0z37D49nB7S6sniwzfhCAAplOT4vmm+MjperTDsWFUGhQZJvN/jxqP2Xccw7N//ReYi7yOlmWhwGyqQyTi0ySbE3BY5eFvUKepekybYi/15XlyF8lwS2jH1MvnJAxAMNVpVUcP4wTnq/dOw5ybrVWF0mPnA8KVzTPuPE5nzZvZ3rkXQeEJTffIToR+T/DH/KTLXcNUtx4nG0ajJ0gM6iVAXGnKlI9Viq/M5Ese+52I6rQmxTsFMn57LNzKgMpWcE", - "oidc.ciba.grant.enabled": "false", - "use.jwks.string": "false", - "backchannel.logout.session.required": "false", - "client_credentials.use_refresh_token": "false", - "require.pushed.authorization.requests": "false", - "saml.client.signature": "false", - "id.token.as.detached.signature": "false", - "saml.assertion.signature": "false", - "saml.encrypt": "false", - "saml.server.signature": "false", - "exclude.session.state.from.auth.response": "false", - "saml.artifact.binding": "false", - "saml_force_name_id_format": "false", - "tls.client.certificate.bound.access.tokens": "false", - "saml.authnstatement": "false", - "display.on.consent.screen": "false", - "saml.onetimeuse.condition": "false" - }, - "authenticationFlowBindingOverrides": {}, - "fullScopeAllowed": true, - "nodeReRegistrationTimeout": -1, - "defaultClientScopes": [ - "web-origins", - "roles", - "profile", - "email" - ], - "optionalClientScopes": [ - "address", - "phone", - "offline_access", - "microprofile-jwt" - ] - }, - { - "id": "43ffb712-f233-48e2-ae79-d6993bac34a5", - "clientId": "realm-management", - "name": "${client_realm-management}", - "surrogateAuthRequired": false, - "enabled": true, - "alwaysDisplayInConsole": false, - "clientAuthenticatorType": "client-secret", - "redirectUris": [], - "webOrigins": [], - "notBefore": 0, - "bearerOnly": true, - "consentRequired": false, - "standardFlowEnabled": true, - "implicitFlowEnabled": false, - "directAccessGrantsEnabled": false, - "serviceAccountsEnabled": false, - "publicClient": false, - "frontchannelLogout": false, - "protocol": "openid-connect", - "attributes": {}, - "authenticationFlowBindingOverrides": {}, - "fullScopeAllowed": false, - "nodeReRegistrationTimeout": 0, - "defaultClientScopes": [ - "web-origins", - "roles", - "profile", - "email" - ], - "optionalClientScopes": [ - "address", - "phone", - "offline_access", - "microprofile-jwt" - ] - }, - { - "id": "3747f98f-efbb-49ef-8238-a349bf5ab409", - "clientId": "security-admin-console", - "name": "${client_security-admin-console}", - "rootUrl": "${authAdminUrl}", - "baseUrl": "/admin/oidc-realm/console/", - "surrogateAuthRequired": false, - "enabled": true, - "alwaysDisplayInConsole": false, - "clientAuthenticatorType": "client-secret", - "redirectUris": [ - "/admin/oidc-realm/console/*" - ], - "webOrigins": [ - "+" - ], - "notBefore": 0, - "bearerOnly": false, - "consentRequired": false, - "standardFlowEnabled": true, - "implicitFlowEnabled": false, - "directAccessGrantsEnabled": false, - "serviceAccountsEnabled": false, - "publicClient": true, - "frontchannelLogout": false, - "protocol": "openid-connect", - 
"attributes": { - "pkce.code.challenge.method": "S256" - }, - "authenticationFlowBindingOverrides": {}, - "fullScopeAllowed": false, - "nodeReRegistrationTimeout": 0, - "protocolMappers": [ - { - "id": "2fbdf6c9-ee69-4edc-b780-ec62aecfc519", - "name": "locale", - "protocol": "openid-connect", - "protocolMapper": "oidc-usermodel-attribute-mapper", - "consentRequired": false, - "config": { - "userinfo.token.claim": "true", - "user.attribute": "locale", - "id.token.claim": "true", - "access.token.claim": "true", - "claim.name": "locale", - "jsonType.label": "String" - } - } - ], - "defaultClientScopes": [ - "web-origins", - "roles", - "profile", - "email" - ], - "optionalClientScopes": [ - "address", - "phone", - "offline_access", - "microprofile-jwt" - ] - } - ], - "clientScopes": [ - { - "id": "f76f507d-7d1c-495b-9504-47830b3834f1", - "name": "phone", - "description": "OpenID Connect built-in scope: phone", - "protocol": "openid-connect", - "attributes": { - "include.in.token.scope": "true", - "display.on.consent.screen": "true", - "consent.screen.text": "${phoneScopeConsentText}" - }, - "protocolMappers": [ - { - "id": "be849ec8-1747-4efb-bc00-beeaf44f11c8", - "name": "phone number verified", - "protocol": "openid-connect", - "protocolMapper": "oidc-usermodel-attribute-mapper", - "consentRequired": false, - "config": { - "userinfo.token.claim": "true", - "user.attribute": "phoneNumberVerified", - "id.token.claim": "true", - "access.token.claim": "true", - "claim.name": "phone_number_verified", - "jsonType.label": "boolean" - } - }, - { - "id": "8e8600ec-4290-435d-b109-9f0547cb4a1d", - "name": "phone number", - "protocol": "openid-connect", - "protocolMapper": "oidc-usermodel-attribute-mapper", - "consentRequired": false, - "config": { - "userinfo.token.claim": "true", - "user.attribute": "phoneNumber", - "id.token.claim": "true", - "access.token.claim": "true", - "claim.name": "phone_number", - "jsonType.label": "String" - } - } - ] - }, - { - "id": "54b87197-5309-4b2c-8ad9-f561a0fc178a", - "name": "role_list", - "description": "SAML role list", - "protocol": "saml", - "attributes": { - "consent.screen.text": "${samlRoleListScopeConsentText}", - "display.on.consent.screen": "true" - }, - "protocolMappers": [ - { - "id": "5fd831af-19a5-4a9c-b44f-2a806fae011c", - "name": "role list", - "protocol": "saml", - "protocolMapper": "saml-role-list-mapper", - "consentRequired": false, - "config": { - "single": "false", - "attribute.nameformat": "Basic", - "attribute.name": "Role" - } - } - ] - }, - { - "id": "2f85470d-8cb7-4f07-8602-47342d68af86", - "name": "web-origins", - "description": "OpenID Connect scope for add allowed web origins to the access token", - "protocol": "openid-connect", - "attributes": { - "include.in.token.scope": "false", - "display.on.consent.screen": "false", - "consent.screen.text": "" - }, - "protocolMappers": [ - { - "id": "c5d2aafc-f72d-4d7b-9d88-cd759f0e045e", - "name": "allowed web origins", - "protocol": "openid-connect", - "protocolMapper": "oidc-allowed-origins-mapper", - "consentRequired": false, - "config": {} - } - ] - }, - { - "id": "528face9-229a-4adf-98d8-68b1a22e880d", - "name": "microprofile-jwt", - "description": "Microprofile - JWT built-in scope", - "protocol": "openid-connect", - "attributes": { - "include.in.token.scope": "true", - "display.on.consent.screen": "false" - }, - "protocolMappers": [ - { - "id": "89240a7c-10f3-4e09-9d6b-41955b86c58d", - "name": "groups", - "protocol": "openid-connect", - "protocolMapper": "oidc-usermodel-realm-role-mapper", 
- "consentRequired": false, - "config": { - "multivalued": "true", - "userinfo.token.claim": "true", - "user.attribute": "foo", - "id.token.claim": "true", - "access.token.claim": "true", - "claim.name": "groups", - "jsonType.label": "String" - } - }, - { - "id": "15b6db72-4870-480e-a675-87f87df5f8a5", - "name": "upn", - "protocol": "openid-connect", - "protocolMapper": "oidc-usermodel-property-mapper", - "consentRequired": false, - "config": { - "userinfo.token.claim": "true", - "user.attribute": "username", - "id.token.claim": "true", - "access.token.claim": "true", - "claim.name": "upn", - "jsonType.label": "String" - } - } - ] - }, - { - "id": "cdd11477-b02b-4886-bc6d-cf4b728ebc0e", - "name": "email", - "description": "OpenID Connect built-in scope: email", - "protocol": "openid-connect", - "attributes": { - "include.in.token.scope": "true", - "display.on.consent.screen": "true", - "consent.screen.text": "${emailScopeConsentText}" - }, - "protocolMappers": [ - { - "id": "627b9f4f-23d6-4480-adf4-264faf58de33", - "name": "email verified", - "protocol": "openid-connect", - "protocolMapper": "oidc-usermodel-property-mapper", - "consentRequired": false, - "config": { - "userinfo.token.claim": "true", - "user.attribute": "emailVerified", - "id.token.claim": "true", - "access.token.claim": "true", - "claim.name": "email_verified", - "jsonType.label": "boolean" - } - }, - { - "id": "6a2adf2e-db2d-4ebe-8d48-f658f9b4a5ca", - "name": "email", - "protocol": "openid-connect", - "protocolMapper": "oidc-usermodel-property-mapper", - "consentRequired": false, - "config": { - "userinfo.token.claim": "true", - "user.attribute": "email", - "id.token.claim": "true", - "access.token.claim": "true", - "claim.name": "email", - "jsonType.label": "String" - } - } - ] - }, - { - "id": "8f830142-b3f1-40f0-82e2-ceed68857a40", - "name": "roles", - "description": "OpenID Connect scope for add user roles to the access token", - "protocol": "openid-connect", - "attributes": { - "include.in.token.scope": "false", - "display.on.consent.screen": "true", - "consent.screen.text": "${rolesScopeConsentText}" - }, - "protocolMappers": [ - { - "id": "28a96dc6-c4dc-4aae-b316-28b56dccd077", - "name": "audience resolve", - "protocol": "openid-connect", - "protocolMapper": "oidc-audience-resolve-mapper", - "consentRequired": false, - "config": {} - }, - { - "id": "3e81050f-540e-4f3d-9abf-86406e484f76", - "name": "realm roles", - "protocol": "openid-connect", - "protocolMapper": "oidc-usermodel-realm-role-mapper", - "consentRequired": false, - "config": { - "user.attribute": "foo", - "access.token.claim": "true", - "claim.name": "realm_access.roles", - "jsonType.label": "String", - "multivalued": "true" - } - }, - { - "id": "13afa1f4-3fac-4c90-a9b4-e84e682f46e9", - "name": "client roles", - "protocol": "openid-connect", - "protocolMapper": "oidc-usermodel-client-role-mapper", - "consentRequired": false, - "config": { - "user.attribute": "foo", - "access.token.claim": "true", - "claim.name": "resource_access.${client_id}.roles", - "jsonType.label": "String", - "multivalued": "true" - } - } - ] - }, - { - "id": "3beac2fc-e947-408f-8422-ca9a1e66a258", - "name": "address", - "description": "OpenID Connect built-in scope: address", - "protocol": "openid-connect", - "attributes": { - "include.in.token.scope": "true", - "display.on.consent.screen": "true", - "consent.screen.text": "${addressScopeConsentText}" - }, - "protocolMappers": [ - { - "id": "12911891-db5c-4a35-80fa-555c5eda7e68", - "name": "address", - "protocol": 
"openid-connect", - "protocolMapper": "oidc-address-mapper", - "consentRequired": false, - "config": { - "user.attribute.formatted": "formatted", - "user.attribute.country": "country", - "user.attribute.postal_code": "postal_code", - "userinfo.token.claim": "true", - "user.attribute.street": "street", - "id.token.claim": "true", - "user.attribute.region": "region", - "access.token.claim": "true", - "user.attribute.locality": "locality" - } - } - ] - }, - { - "id": "8a29297a-e6f6-41ae-b25d-8a14236de535", - "name": "offline_access", - "description": "OpenID Connect built-in scope: offline_access", - "protocol": "openid-connect", - "attributes": { - "consent.screen.text": "${offlineAccessScopeConsentText}", - "display.on.consent.screen": "true" - } - }, - { - "id": "ce1622c5-701f-4e3e-9d2d-8dae0f07a295", - "name": "profile", - "description": "OpenID Connect built-in scope: profile", - "protocol": "openid-connect", - "attributes": { - "include.in.token.scope": "true", - "display.on.consent.screen": "true", - "consent.screen.text": "${profileScopeConsentText}" - }, - "protocolMappers": [ - { - "id": "98cc62b8-250a-4087-92da-bb0f0931e675", - "name": "full name", - "protocol": "openid-connect", - "protocolMapper": "oidc-full-name-mapper", - "consentRequired": false, - "config": { - "id.token.claim": "true", - "access.token.claim": "true", - "userinfo.token.claim": "true" - } - }, - { - "id": "b99c8c44-4cc9-4c87-a5a1-c14e64d472ae", - "name": "given name", - "protocol": "openid-connect", - "protocolMapper": "oidc-usermodel-property-mapper", - "consentRequired": false, - "config": { - "userinfo.token.claim": "true", - "user.attribute": "firstName", - "id.token.claim": "true", - "access.token.claim": "true", - "claim.name": "given_name", - "jsonType.label": "String" - } - }, - { - "id": "903d5932-bdec-42bc-a53c-3cce93deaa1c", - "name": "zoneinfo", - "protocol": "openid-connect", - "protocolMapper": "oidc-usermodel-attribute-mapper", - "consentRequired": false, - "config": { - "userinfo.token.claim": "true", - "user.attribute": "zoneinfo", - "id.token.claim": "true", - "access.token.claim": "true", - "claim.name": "zoneinfo", - "jsonType.label": "String" - } - }, - { - "id": "ccbdc095-28f7-4769-8261-2e32c7b6fab0", - "name": "picture", - "protocol": "openid-connect", - "protocolMapper": "oidc-usermodel-attribute-mapper", - "consentRequired": false, - "config": { - "userinfo.token.claim": "true", - "user.attribute": "picture", - "id.token.claim": "true", - "access.token.claim": "true", - "claim.name": "picture", - "jsonType.label": "String" - } - }, - { - "id": "22a4a38c-f755-44f3-b847-803c7fb3cef5", - "name": "birthdate", - "protocol": "openid-connect", - "protocolMapper": "oidc-usermodel-attribute-mapper", - "consentRequired": false, - "config": { - "userinfo.token.claim": "true", - "user.attribute": "birthdate", - "id.token.claim": "true", - "access.token.claim": "true", - "claim.name": "birthdate", - "jsonType.label": "String" - } - }, - { - "id": "78726920-b4e2-4ed2-b9e0-df38a7f82376", - "name": "updated at", - "protocol": "openid-connect", - "protocolMapper": "oidc-usermodel-attribute-mapper", - "consentRequired": false, - "config": { - "userinfo.token.claim": "true", - "user.attribute": "updatedAt", - "id.token.claim": "true", - "access.token.claim": "true", - "claim.name": "updated_at", - "jsonType.label": "String" - } - }, - { - "id": "c64c6eb8-5cbe-4092-bf2c-dd02b8c0e0e8", - "name": "family name", - "protocol": "openid-connect", - "protocolMapper": "oidc-usermodel-property-mapper", - 
"consentRequired": false, - "config": { - "userinfo.token.claim": "true", - "user.attribute": "lastName", - "id.token.claim": "true", - "access.token.claim": "true", - "claim.name": "family_name", - "jsonType.label": "String" - } - }, - { - "id": "306784d8-8da1-48d8-92a3-dccfff83bcaf", - "name": "middle name", - "protocol": "openid-connect", - "protocolMapper": "oidc-usermodel-attribute-mapper", - "consentRequired": false, - "config": { - "userinfo.token.claim": "true", - "user.attribute": "middleName", - "id.token.claim": "true", - "access.token.claim": "true", - "claim.name": "middle_name", - "jsonType.label": "String" - } - }, - { - "id": "0ff127fa-774e-43a8-a1fc-47ea3f307aa1", - "name": "website", - "protocol": "openid-connect", - "protocolMapper": "oidc-usermodel-attribute-mapper", - "consentRequired": false, - "config": { - "userinfo.token.claim": "true", - "user.attribute": "website", - "id.token.claim": "true", - "access.token.claim": "true", - "claim.name": "website", - "jsonType.label": "String" - } - }, - { - "id": "8989c6f8-25c5-4d02-aa06-25b3b77fc227", - "name": "profile", - "protocol": "openid-connect", - "protocolMapper": "oidc-usermodel-attribute-mapper", - "consentRequired": false, - "config": { - "userinfo.token.claim": "true", - "user.attribute": "profile", - "id.token.claim": "true", - "access.token.claim": "true", - "claim.name": "profile", - "jsonType.label": "String" - } - }, - { - "id": "3b67000c-9cbf-43ee-9e05-26f560871897", - "name": "gender", - "protocol": "openid-connect", - "protocolMapper": "oidc-usermodel-attribute-mapper", - "consentRequired": false, - "config": { - "userinfo.token.claim": "true", - "user.attribute": "gender", - "id.token.claim": "true", - "access.token.claim": "true", - "claim.name": "gender", - "jsonType.label": "String" - } - }, - { - "id": "c28b04de-2770-423e-9b9a-b3321d7300e2", - "name": "nickname", - "protocol": "openid-connect", - "protocolMapper": "oidc-usermodel-attribute-mapper", - "consentRequired": false, - "config": { - "userinfo.token.claim": "true", - "user.attribute": "nickname", - "id.token.claim": "true", - "access.token.claim": "true", - "claim.name": "nickname", - "jsonType.label": "String" - } - }, - { - "id": "fd791ed4-d4ab-4df9-81b4-c69a3134bcab", - "name": "username", - "protocol": "openid-connect", - "protocolMapper": "oidc-usermodel-property-mapper", - "consentRequired": false, - "config": { - "userinfo.token.claim": "true", - "user.attribute": "username", - "id.token.claim": "true", - "access.token.claim": "true", - "claim.name": "preferred_username", - "jsonType.label": "String" - } - }, - { - "id": "c7378ce5-3673-47b2-9ebc-92c772bebf9f", - "name": "locale", - "protocol": "openid-connect", - "protocolMapper": "oidc-usermodel-attribute-mapper", - "consentRequired": false, - "config": { - "userinfo.token.claim": "true", - "user.attribute": "locale", - "id.token.claim": "true", - "access.token.claim": "true", - "claim.name": "locale", - "jsonType.label": "String" - } - } - ] - } - ], - "defaultDefaultClientScopes": [ - "web-origins", - "role_list", - "roles", - "email", - "profile" - ], - "defaultOptionalClientScopes": [ - "address", - "microprofile-jwt", - "offline_access", - "phone" - ], - "browserSecurityHeaders": { - "contentSecurityPolicyReportOnly": "", - "xContentTypeOptions": "nosniff", - "xRobotsTag": "none", - "xFrameOptions": "SAMEORIGIN", - "contentSecurityPolicy": "frame-src 'self'; frame-ancestors 'self'; object-src 'none';", - "xXSSProtection": "1; mode=block", - "strictTransportSecurity": 
"max-age=31536000; includeSubDomains" - }, - "smtpServer": {}, - "eventsEnabled": false, - "eventsListeners": [ - "jboss-logging" - ], - "enabledEventTypes": [], - "adminEventsEnabled": false, - "adminEventsDetailsEnabled": false, - "identityProviders": [], - "identityProviderMappers": [], - "components": { - "org.keycloak.services.clientregistration.policy.ClientRegistrationPolicy": [ - { - "id": "8e2d0c22-0627-4115-9f14-4225244333d9", - "name": "Trusted Hosts", - "providerId": "trusted-hosts", - "subType": "anonymous", - "subComponents": {}, - "config": { - "host-sending-registration-request-must-match": [ - "true" - ], - "client-uris-must-match": [ - "true" - ] - } - }, - { - "id": "45bdde87-a364-4d66-a12e-1a4fd42c85fb", - "name": "Full Scope Disabled", - "providerId": "scope", - "subType": "anonymous", - "subComponents": {}, - "config": {} - }, - { - "id": "7b7d3215-68d2-41db-bc0f-db0a45934a84", - "name": "Allowed Client Scopes", - "providerId": "allowed-client-templates", - "subType": "anonymous", - "subComponents": {}, - "config": { - "allow-default-scopes": [ - "true" - ] - } - }, - { - "id": "e067781a-6058-4f2b-9408-3390e9854cf8", - "name": "Consent Required", - "providerId": "consent-required", - "subType": "anonymous", - "subComponents": {}, - "config": {} - }, - { - "id": "296be954-8084-45c8-b6f3-94d53f7341f6", - "name": "Allowed Protocol Mapper Types", - "providerId": "allowed-protocol-mappers", - "subType": "anonymous", - "subComponents": {}, - "config": { - "allowed-protocol-mapper-types": [ - "saml-role-list-mapper", - "saml-user-property-mapper", - "oidc-usermodel-attribute-mapper", - "oidc-address-mapper", - "oidc-sha256-pairwise-sub-mapper", - "saml-user-attribute-mapper", - "oidc-usermodel-property-mapper", - "oidc-full-name-mapper" - ] - } - }, - { - "id": "b9a2a484-aee1-4633-aa37-a9ab2b74a239", - "name": "Allowed Client Scopes", - "providerId": "allowed-client-templates", - "subType": "authenticated", - "subComponents": {}, - "config": { - "allow-default-scopes": [ - "true" - ] - } - }, - { - "id": "016e4914-a32c-40fa-8aab-3eb25a411df5", - "name": "Max Clients Limit", - "providerId": "max-clients", - "subType": "anonymous", - "subComponents": {}, - "config": { - "max-clients": [ - "200" - ] - } - }, - { - "id": "a4fb2fa3-93b8-4497-8047-424f70f298c7", - "name": "Allowed Protocol Mapper Types", - "providerId": "allowed-protocol-mappers", - "subType": "authenticated", - "subComponents": {}, - "config": { - "allowed-protocol-mapper-types": [ - "oidc-sha256-pairwise-sub-mapper", - "oidc-full-name-mapper", - "saml-user-property-mapper", - "saml-role-list-mapper", - "oidc-usermodel-attribute-mapper", - "oidc-address-mapper", - "oidc-usermodel-property-mapper", - "saml-user-attribute-mapper" - ] - } - } - ], - "org.keycloak.keys.KeyProvider": [ - { - "id": "31b693fa-2b95-47a6-96a1-dfff868ca1df", - "name": "rsa-enc-generated", - "providerId": "rsa-enc-generated", - "subComponents": {}, - "config": { - "priority": [ - "100" - ], - "algorithm": [ - "RSA-OAEP" - ] - } - }, - { - "id": "f1e63d09-45a0-4382-8346-0408ee906649", - "name": "hmac-generated", - "providerId": "hmac-generated", - "subComponents": {}, - "config": { - "priority": [ - "100" - ], - "algorithm": [ - "HS256" - ] - } - }, - { - "id": "99084d92-06f5-4787-b932-a40b5377f3cb", - "name": "rsa-generated", - "providerId": "rsa-generated", - "subComponents": {}, - "config": { - "priority": [ - "100" - ] - } - }, - { - "id": "9887f1bf-b4f7-4646-9919-a9dbde13ce74", - "name": "aes-generated", - "providerId": "aes-generated", 
- "subComponents": {}, - "config": { - "priority": [ - "100" - ] - } - } - ] - }, - "internationalizationEnabled": false, - "supportedLocales": [], - "authenticationFlows": [ - { - "id": "a7f91199-178d-4399-8319-5063ffcc37b0", - "alias": "Account verification options", - "description": "Method with which to verity the existing account", - "providerId": "basic-flow", - "topLevel": false, - "builtIn": true, - "authenticationExecutions": [ - { - "authenticator": "idp-email-verification", - "authenticatorFlow": false, - "requirement": "ALTERNATIVE", - "priority": 10, - "userSetupAllowed": false, - "autheticatorFlow": false - }, - { - "authenticatorFlow": true, - "requirement": "ALTERNATIVE", - "priority": 20, - "flowAlias": "Verify Existing Account by Re-authentication", - "userSetupAllowed": false, - "autheticatorFlow": true - } - ] - }, - { - "id": "602533e3-f7a1-4e25-9a12-f3080eeccec3", - "alias": "Authentication Options", - "description": "Authentication options.", - "providerId": "basic-flow", - "topLevel": false, - "builtIn": true, - "authenticationExecutions": [ - { - "authenticator": "basic-auth", - "authenticatorFlow": false, - "requirement": "REQUIRED", - "priority": 10, - "userSetupAllowed": false, - "autheticatorFlow": false - }, - { - "authenticator": "basic-auth-otp", - "authenticatorFlow": false, - "requirement": "DISABLED", - "priority": 20, - "userSetupAllowed": false, - "autheticatorFlow": false - }, - { - "authenticator": "auth-spnego", - "authenticatorFlow": false, - "requirement": "DISABLED", - "priority": 30, - "userSetupAllowed": false, - "autheticatorFlow": false - } - ] - }, - { - "id": "ba7bcdfd-05c6-4da6-827b-24e3513bddbe", - "alias": "Browser - Conditional OTP", - "description": "Flow to determine if the OTP is required for the authentication", - "providerId": "basic-flow", - "topLevel": false, - "builtIn": true, - "authenticationExecutions": [ - { - "authenticator": "conditional-user-configured", - "authenticatorFlow": false, - "requirement": "REQUIRED", - "priority": 10, - "userSetupAllowed": false, - "autheticatorFlow": false - }, - { - "authenticator": "auth-otp-form", - "authenticatorFlow": false, - "requirement": "REQUIRED", - "priority": 20, - "userSetupAllowed": false, - "autheticatorFlow": false - } - ] - }, - { - "id": "d0f62327-ef2f-4561-8b5a-1f61faecdac0", - "alias": "Direct Grant - Conditional OTP", - "description": "Flow to determine if the OTP is required for the authentication", - "providerId": "basic-flow", - "topLevel": false, - "builtIn": true, - "authenticationExecutions": [ - { - "authenticator": "conditional-user-configured", - "authenticatorFlow": false, - "requirement": "REQUIRED", - "priority": 10, - "userSetupAllowed": false, - "autheticatorFlow": false - }, - { - "authenticator": "direct-grant-validate-otp", - "authenticatorFlow": false, - "requirement": "REQUIRED", - "priority": 20, - "userSetupAllowed": false, - "autheticatorFlow": false - } - ] - }, - { - "id": "f10b85d0-26ee-4648-b81b-80213b066d76", - "alias": "First broker login - Conditional OTP", - "description": "Flow to determine if the OTP is required for the authentication", - "providerId": "basic-flow", - "topLevel": false, - "builtIn": true, - "authenticationExecutions": [ - { - "authenticator": "conditional-user-configured", - "authenticatorFlow": false, - "requirement": "REQUIRED", - "priority": 10, - "userSetupAllowed": false, - "autheticatorFlow": false - }, - { - "authenticator": "auth-otp-form", - "authenticatorFlow": false, - "requirement": "REQUIRED", - "priority": 20, 
- "userSetupAllowed": false, - "autheticatorFlow": false - } - ] - }, - { - "id": "d6af4ac0-f6bc-4197-bf01-6e2c321ecaad", - "alias": "Handle Existing Account", - "description": "Handle what to do if there is existing account with same email/username like authenticated identity provider", - "providerId": "basic-flow", - "topLevel": false, - "builtIn": true, - "authenticationExecutions": [ - { - "authenticator": "idp-confirm-link", - "authenticatorFlow": false, - "requirement": "REQUIRED", - "priority": 10, - "userSetupAllowed": false, - "autheticatorFlow": false - }, - { - "authenticatorFlow": true, - "requirement": "REQUIRED", - "priority": 20, - "flowAlias": "Account verification options", - "userSetupAllowed": false, - "autheticatorFlow": true - } - ] - }, - { - "id": "501ab743-2e2f-427d-820f-14deed111b08", - "alias": "Reset - Conditional OTP", - "description": "Flow to determine if the OTP should be reset or not. Set to REQUIRED to force.", - "providerId": "basic-flow", - "topLevel": false, - "builtIn": true, - "authenticationExecutions": [ - { - "authenticator": "conditional-user-configured", - "authenticatorFlow": false, - "requirement": "REQUIRED", - "priority": 10, - "userSetupAllowed": false, - "autheticatorFlow": false - }, - { - "authenticator": "reset-otp", - "authenticatorFlow": false, - "requirement": "REQUIRED", - "priority": 20, - "userSetupAllowed": false, - "autheticatorFlow": false - } - ] - }, - { - "id": "e02c3a63-a09d-4dde-9f6c-22c95eef8534", - "alias": "User creation or linking", - "description": "Flow for the existing/non-existing user alternatives", - "providerId": "basic-flow", - "topLevel": false, - "builtIn": true, - "authenticationExecutions": [ - { - "authenticatorConfig": "create unique user config", - "authenticator": "idp-create-user-if-unique", - "authenticatorFlow": false, - "requirement": "ALTERNATIVE", - "priority": 10, - "userSetupAllowed": false, - "autheticatorFlow": false - }, - { - "authenticatorFlow": true, - "requirement": "ALTERNATIVE", - "priority": 20, - "flowAlias": "Handle Existing Account", - "userSetupAllowed": false, - "autheticatorFlow": true - } - ] - }, - { - "id": "c348906d-6266-4e68-937e-8f3d15c66524", - "alias": "Verify Existing Account by Re-authentication", - "description": "Reauthentication of existing account", - "providerId": "basic-flow", - "topLevel": false, - "builtIn": true, - "authenticationExecutions": [ - { - "authenticator": "idp-username-password-form", - "authenticatorFlow": false, - "requirement": "REQUIRED", - "priority": 10, - "userSetupAllowed": false, - "autheticatorFlow": false - }, - { - "authenticatorFlow": true, - "requirement": "CONDITIONAL", - "priority": 20, - "flowAlias": "First broker login - Conditional OTP", - "userSetupAllowed": false, - "autheticatorFlow": true - } - ] - }, - { - "id": "cf6ba166-43d5-4687-95c4-0a184ca08885", - "alias": "browser", - "description": "browser based authentication", - "providerId": "basic-flow", - "topLevel": true, - "builtIn": true, - "authenticationExecutions": [ - { - "authenticator": "auth-cookie", - "authenticatorFlow": false, - "requirement": "ALTERNATIVE", - "priority": 10, - "userSetupAllowed": false, - "autheticatorFlow": false - }, - { - "authenticator": "auth-spnego", - "authenticatorFlow": false, - "requirement": "DISABLED", - "priority": 20, - "userSetupAllowed": false, - "autheticatorFlow": false - }, - { - "authenticator": "identity-provider-redirector", - "authenticatorFlow": false, - "requirement": "ALTERNATIVE", - "priority": 25, - "userSetupAllowed": 
false, - "autheticatorFlow": false - }, - { - "authenticatorFlow": true, - "requirement": "ALTERNATIVE", - "priority": 30, - "flowAlias": "forms", - "userSetupAllowed": false, - "autheticatorFlow": true - } - ] - }, - { - "id": "87cb4f25-9275-4617-9e95-63adf1ce3ece", - "alias": "clients", - "description": "Base authentication for clients", - "providerId": "client-flow", - "topLevel": true, - "builtIn": true, - "authenticationExecutions": [ - { - "authenticator": "client-secret", - "authenticatorFlow": false, - "requirement": "ALTERNATIVE", - "priority": 10, - "userSetupAllowed": false, - "autheticatorFlow": false - }, - { - "authenticator": "client-jwt", - "authenticatorFlow": false, - "requirement": "ALTERNATIVE", - "priority": 20, - "userSetupAllowed": false, - "autheticatorFlow": false - }, - { - "authenticator": "client-secret-jwt", - "authenticatorFlow": false, - "requirement": "ALTERNATIVE", - "priority": 30, - "userSetupAllowed": false, - "autheticatorFlow": false - }, - { - "authenticator": "client-x509", - "authenticatorFlow": false, - "requirement": "ALTERNATIVE", - "priority": 40, - "userSetupAllowed": false, - "autheticatorFlow": false - } - ] - }, - { - "id": "e75b99c5-c566-4009-b0ba-c73716bed254", - "alias": "direct grant", - "description": "OpenID Connect Resource Owner Grant", - "providerId": "basic-flow", - "topLevel": true, - "builtIn": true, - "authenticationExecutions": [ - { - "authenticator": "direct-grant-validate-username", - "authenticatorFlow": false, - "requirement": "REQUIRED", - "priority": 10, - "userSetupAllowed": false, - "autheticatorFlow": false - }, - { - "authenticator": "direct-grant-validate-password", - "authenticatorFlow": false, - "requirement": "REQUIRED", - "priority": 20, - "userSetupAllowed": false, - "autheticatorFlow": false - }, - { - "authenticatorFlow": true, - "requirement": "CONDITIONAL", - "priority": 30, - "flowAlias": "Direct Grant - Conditional OTP", - "userSetupAllowed": false, - "autheticatorFlow": true - } - ] - }, - { - "id": "8a97380c-0f70-45cb-a7b0-780eb70453ba", - "alias": "docker auth", - "description": "Used by Docker clients to authenticate against the IDP", - "providerId": "basic-flow", - "topLevel": true, - "builtIn": true, - "authenticationExecutions": [ - { - "authenticator": "docker-http-basic-authenticator", - "authenticatorFlow": false, - "requirement": "REQUIRED", - "priority": 10, - "userSetupAllowed": false, - "autheticatorFlow": false - } - ] - }, - { - "id": "131e0aad-5422-4504-bafc-96be2fa44c34", - "alias": "first broker login", - "description": "Actions taken after first broker login with identity provider account, which is not yet linked to any Keycloak account", - "providerId": "basic-flow", - "topLevel": true, - "builtIn": true, - "authenticationExecutions": [ - { - "authenticatorConfig": "review profile config", - "authenticator": "idp-review-profile", - "authenticatorFlow": false, - "requirement": "REQUIRED", - "priority": 10, - "userSetupAllowed": false, - "autheticatorFlow": false - }, - { - "authenticatorFlow": true, - "requirement": "REQUIRED", - "priority": 20, - "flowAlias": "User creation or linking", - "userSetupAllowed": false, - "autheticatorFlow": true - } - ] - }, - { - "id": "e7d4b793-b3c2-4ec3-a2b1-04f7217e8f46", - "alias": "forms", - "description": "Username, password, otp and other auth forms.", - "providerId": "basic-flow", - "topLevel": false, - "builtIn": true, - "authenticationExecutions": [ - { - "authenticator": "auth-username-password-form", - "authenticatorFlow": false, - 
"requirement": "REQUIRED", - "priority": 10, - "userSetupAllowed": false, - "autheticatorFlow": false - }, - { - "authenticatorFlow": true, - "requirement": "CONDITIONAL", - "priority": 20, - "flowAlias": "Browser - Conditional OTP", - "userSetupAllowed": false, - "autheticatorFlow": true - } - ] - }, - { - "id": "f59a7688-61a1-4ac9-a13a-03f92e022add", - "alias": "http challenge", - "description": "An authentication flow based on challenge-response HTTP Authentication Schemes", - "providerId": "basic-flow", - "topLevel": true, - "builtIn": true, - "authenticationExecutions": [ - { - "authenticator": "no-cookie-redirect", - "authenticatorFlow": false, - "requirement": "REQUIRED", - "priority": 10, - "userSetupAllowed": false, - "autheticatorFlow": false - }, - { - "authenticatorFlow": true, - "requirement": "REQUIRED", - "priority": 20, - "flowAlias": "Authentication Options", - "userSetupAllowed": false, - "autheticatorFlow": true - } - ] - }, - { - "id": "80a7b0f5-abb3-4780-be58-4ed1dc3e50fa", - "alias": "registration", - "description": "registration flow", - "providerId": "basic-flow", - "topLevel": true, - "builtIn": true, - "authenticationExecutions": [ - { - "authenticator": "registration-page-form", - "authenticatorFlow": true, - "requirement": "REQUIRED", - "priority": 10, - "flowAlias": "registration form", - "userSetupAllowed": false, - "autheticatorFlow": true - } - ] - }, - { - "id": "f18231cf-b803-493b-9dd6-ee8fa602c861", - "alias": "registration form", - "description": "registration form", - "providerId": "form-flow", - "topLevel": false, - "builtIn": true, - "authenticationExecutions": [ - { - "authenticator": "registration-user-creation", - "authenticatorFlow": false, - "requirement": "REQUIRED", - "priority": 20, - "userSetupAllowed": false, - "autheticatorFlow": false - }, - { - "authenticator": "registration-profile-action", - "authenticatorFlow": false, - "requirement": "REQUIRED", - "priority": 40, - "userSetupAllowed": false, - "autheticatorFlow": false - }, - { - "authenticator": "registration-password-action", - "authenticatorFlow": false, - "requirement": "REQUIRED", - "priority": 50, - "userSetupAllowed": false, - "autheticatorFlow": false - }, - { - "authenticator": "registration-recaptcha-action", - "authenticatorFlow": false, - "requirement": "DISABLED", - "priority": 60, - "userSetupAllowed": false, - "autheticatorFlow": false - } - ] - }, - { - "id": "34ccfce6-1488-4db3-b90e-d98e8d8b2ae6", - "alias": "reset credentials", - "description": "Reset credentials for a user if they forgot their password or something", - "providerId": "basic-flow", - "topLevel": true, - "builtIn": true, - "authenticationExecutions": [ - { - "authenticator": "reset-credentials-choose-user", - "authenticatorFlow": false, - "requirement": "REQUIRED", - "priority": 10, - "userSetupAllowed": false, - "autheticatorFlow": false - }, - { - "authenticator": "reset-credential-email", - "authenticatorFlow": false, - "requirement": "REQUIRED", - "priority": 20, - "userSetupAllowed": false, - "autheticatorFlow": false - }, - { - "authenticator": "reset-password", - "authenticatorFlow": false, - "requirement": "REQUIRED", - "priority": 30, - "userSetupAllowed": false, - "autheticatorFlow": false - }, - { - "authenticatorFlow": true, - "requirement": "CONDITIONAL", - "priority": 40, - "flowAlias": "Reset - Conditional OTP", - "userSetupAllowed": false, - "autheticatorFlow": true - } - ] - }, - { - "id": "4468100c-fa83-4c16-8970-d53cb592f93a", - "alias": "saml ecp", - "description": "SAML ECP Profile 
Authentication Flow", - "providerId": "basic-flow", - "topLevel": true, - "builtIn": true, - "authenticationExecutions": [ - { - "authenticator": "http-basic-authenticator", - "authenticatorFlow": false, - "requirement": "REQUIRED", - "priority": 10, - "userSetupAllowed": false, - "autheticatorFlow": false - } - ] - } - ], - "authenticatorConfig": [ - { - "id": "c3bb087e-7fe9-4f13-b1bd-c2d7d1320054", - "alias": "create unique user config", - "config": { - "require.password.update.after.registration": "false" - } - }, - { - "id": "09820d9d-3c12-45f3-bc62-97b53f8a7efe", - "alias": "review profile config", - "config": { - "update.profile.on.first.login": "missing" - } - } - ], - "requiredActions": [ - { - "alias": "CONFIGURE_TOTP", - "name": "Configure OTP", - "providerId": "CONFIGURE_TOTP", - "enabled": true, - "defaultAction": false, - "priority": 10, - "config": {} - }, - { - "alias": "terms_and_conditions", - "name": "Terms and Conditions", - "providerId": "terms_and_conditions", - "enabled": false, - "defaultAction": false, - "priority": 20, - "config": {} - }, - { - "alias": "UPDATE_PASSWORD", - "name": "Update Password", - "providerId": "UPDATE_PASSWORD", - "enabled": true, - "defaultAction": false, - "priority": 30, - "config": {} - }, - { - "alias": "UPDATE_PROFILE", - "name": "Update Profile", - "providerId": "UPDATE_PROFILE", - "enabled": true, - "defaultAction": false, - "priority": 40, - "config": {} - }, - { - "alias": "VERIFY_EMAIL", - "name": "Verify Email", - "providerId": "VERIFY_EMAIL", - "enabled": true, - "defaultAction": false, - "priority": 50, - "config": {} - }, - { - "alias": "delete_account", - "name": "Delete Account", - "providerId": "delete_account", - "enabled": false, - "defaultAction": false, - "priority": 60, - "config": {} - }, - { - "alias": "update_user_locale", - "name": "Update User Locale", - "providerId": "update_user_locale", - "enabled": true, - "defaultAction": false, - "priority": 1000, - "config": {} - } - ], - "browserFlow": "browser", - "registrationFlow": "registration", - "directGrantFlow": "direct grant", - "resetCredentialsFlow": "reset credentials", - "clientAuthenticationFlow": "clients", - "dockerAuthenticationFlow": "docker auth", - "attributes": { - "cibaBackchannelTokenDeliveryMode": "poll", - "cibaExpiresIn": "120", - "cibaAuthRequestedUserHint": "login_hint", - "oauth2DeviceCodeLifespan": "600", - "clientOfflineSessionMaxLifespan": "0", - "oauth2DevicePollingInterval": "5", - "clientSessionIdleTimeout": "0", - "parRequestUriLifespan": "60", - "clientSessionMaxLifespan": "0", - "clientOfflineSessionIdleTimeout": "0", - "cibaInterval": "5" - }, - "keycloakVersion": "16.1.1", - "userManagedAccessAllowed": false, - "clientProfiles": { - "profiles": [] - }, - "clientPolicies": { - "policies": [] - } -} diff --git a/conf/keycloak/run-keycloak.sh b/conf/keycloak/run-keycloak.sh index effb37f91b8..ddc5108bee4 100755 --- a/conf/keycloak/run-keycloak.sh +++ b/conf/keycloak/run-keycloak.sh @@ -1,6 +1,6 @@ #!/usr/bin/env bash -DOCKER_IMAGE="jboss/keycloak:16.1.1" +DOCKER_IMAGE="quay.io/keycloak/keycloak:21.0" KEYCLOAK_USER="kcadmin" KEYCLOAK_PASSWORD="kcpassword" KEYCLOAK_PORT=8090 @@ -11,7 +11,7 @@ if [ ! 
"$(docker ps -q -f name=^/keycloak$)" ]; then docker start keycloak echo "INFO - Keycloak container restarted" else - docker run -d --name keycloak -p $KEYCLOAK_PORT:8080 -e KEYCLOAK_USER=$KEYCLOAK_USER -e KEYCLOAK_PASSWORD=$KEYCLOAK_PASSWORD -e KEYCLOAK_IMPORT=/tmp/oidc-realm.json -v "$(pwd)"/oidc-realm.json:/tmp/oidc-realm.json $DOCKER_IMAGE + docker run -d --name keycloak -p $KEYCLOAK_PORT:8080 -e KEYCLOAK_USER=$KEYCLOAK_USER -e KEYCLOAK_PASSWORD=$KEYCLOAK_PASSWORD -e KEYCLOAK_IMPORT=/tmp/test-realm.json -v "$(pwd)"/test-realm.json:/tmp/test-realm.json $DOCKER_IMAGE echo "INFO - Keycloak container created and running" fi else diff --git a/conf/keycloak/test-realm.json b/conf/keycloak/test-realm.json new file mode 100644 index 00000000000..efe71cc5d29 --- /dev/null +++ b/conf/keycloak/test-realm.json @@ -0,0 +1,1939 @@ +{ + "id" : "80a7e04b-a2b5-4891-a2d1-5ad4e915f983", + "realm" : "test", + "displayName" : "", + "displayNameHtml" : "", + "notBefore" : 0, + "defaultSignatureAlgorithm" : "RS256", + "revokeRefreshToken" : false, + "refreshTokenMaxReuse" : 0, + "accessTokenLifespan" : 300, + "accessTokenLifespanForImplicitFlow" : 900, + "ssoSessionIdleTimeout" : 1800, + "ssoSessionMaxLifespan" : 36000, + "ssoSessionIdleTimeoutRememberMe" : 0, + "ssoSessionMaxLifespanRememberMe" : 0, + "offlineSessionIdleTimeout" : 2592000, + "offlineSessionMaxLifespanEnabled" : false, + "offlineSessionMaxLifespan" : 5184000, + "clientSessionIdleTimeout" : 0, + "clientSessionMaxLifespan" : 0, + "clientOfflineSessionIdleTimeout" : 0, + "clientOfflineSessionMaxLifespan" : 0, + "accessCodeLifespan" : 60, + "accessCodeLifespanUserAction" : 300, + "accessCodeLifespanLogin" : 1800, + "actionTokenGeneratedByAdminLifespan" : 43200, + "actionTokenGeneratedByUserLifespan" : 300, + "oauth2DeviceCodeLifespan" : 600, + "oauth2DevicePollingInterval" : 5, + "enabled" : true, + "sslRequired" : "none", + "registrationAllowed" : false, + "registrationEmailAsUsername" : false, + "rememberMe" : false, + "verifyEmail" : false, + "loginWithEmailAllowed" : true, + "duplicateEmailsAllowed" : false, + "resetPasswordAllowed" : false, + "editUsernameAllowed" : false, + "bruteForceProtected" : false, + "permanentLockout" : false, + "maxFailureWaitSeconds" : 900, + "minimumQuickLoginWaitSeconds" : 60, + "waitIncrementSeconds" : 60, + "quickLoginCheckMilliSeconds" : 1000, + "maxDeltaTimeSeconds" : 43200, + "failureFactor" : 30, + "roles" : { + "realm" : [ { + "id" : "075daee1-5ab2-44b5-adbf-fa49a3da8305", + "name" : "uma_authorization", + "description" : "${role_uma_authorization}", + "composite" : false, + "clientRole" : false, + "containerId" : "80a7e04b-a2b5-4891-a2d1-5ad4e915f983", + "attributes" : { } + }, { + "id" : "b4ff9091-ddf9-4536-b175-8cfa3e331d71", + "name" : "default-roles-test", + "description" : "${role_default-roles}", + "composite" : true, + "composites" : { + "realm" : [ "offline_access", "uma_authorization" ], + "client" : { + "account" : [ "view-profile", "manage-account" ] + } + }, + "clientRole" : false, + "containerId" : "80a7e04b-a2b5-4891-a2d1-5ad4e915f983", + "attributes" : { } + }, { + "id" : "e6d31555-6be6-4dee-bc6a-40a53108e4c2", + "name" : "offline_access", + "description" : "${role_offline-access}", + "composite" : false, + "clientRole" : false, + "containerId" : "80a7e04b-a2b5-4891-a2d1-5ad4e915f983", + "attributes" : { } + } ], + "client" : { + "realm-management" : [ { + "id" : "1955bd12-5f86-4a74-b130-d68a8ef6f0ee", + "name" : "impersonation", + "description" : "${role_impersonation}", + "composite" : 
false, + "clientRole" : true, + "containerId" : "dada0ae8-ee9f-415a-9685-42da7c563660", + "attributes" : { } + }, { + "id" : "1109c350-9ab1-426c-9876-ef67d4310f35", + "name" : "view-authorization", + "description" : "${role_view-authorization}", + "composite" : false, + "clientRole" : true, + "containerId" : "dada0ae8-ee9f-415a-9685-42da7c563660", + "attributes" : { } + }, { + "id" : "980c3fd3-1ae3-4b8f-9a00-d764c939035f", + "name" : "query-users", + "description" : "${role_query-users}", + "composite" : false, + "clientRole" : true, + "containerId" : "dada0ae8-ee9f-415a-9685-42da7c563660", + "attributes" : { } + }, { + "id" : "5363e601-0f9d-4633-a8c8-28cb0f859b7b", + "name" : "query-groups", + "description" : "${role_query-groups}", + "composite" : false, + "clientRole" : true, + "containerId" : "dada0ae8-ee9f-415a-9685-42da7c563660", + "attributes" : { } + }, { + "id" : "59aa7992-ad78-48db-868a-25d6e1d7db50", + "name" : "realm-admin", + "description" : "${role_realm-admin}", + "composite" : true, + "composites" : { + "client" : { + "realm-management" : [ "impersonation", "view-authorization", "query-users", "query-groups", "manage-clients", "manage-realm", "view-identity-providers", "query-realms", "manage-authorization", "manage-identity-providers", "manage-users", "view-users", "view-realm", "create-client", "view-clients", "manage-events", "query-clients", "view-events" ] + } + }, + "clientRole" : true, + "containerId" : "dada0ae8-ee9f-415a-9685-42da7c563660", + "attributes" : { } + }, { + "id" : "112f53c2-897d-4c01-81db-b8dc10c5b995", + "name" : "manage-clients", + "description" : "${role_manage-clients}", + "composite" : false, + "clientRole" : true, + "containerId" : "dada0ae8-ee9f-415a-9685-42da7c563660", + "attributes" : { } + }, { + "id" : "c7f57bbd-ef32-4a64-9888-7b8abd90777a", + "name" : "manage-realm", + "description" : "${role_manage-realm}", + "composite" : false, + "clientRole" : true, + "containerId" : "dada0ae8-ee9f-415a-9685-42da7c563660", + "attributes" : { } + }, { + "id" : "8885dac8-0af3-45af-94ce-eff5e801bb80", + "name" : "view-identity-providers", + "description" : "${role_view-identity-providers}", + "composite" : false, + "clientRole" : true, + "containerId" : "dada0ae8-ee9f-415a-9685-42da7c563660", + "attributes" : { } + }, { + "id" : "2673346c-b0ef-4e01-8a90-be03866093af", + "name" : "manage-authorization", + "description" : "${role_manage-authorization}", + "composite" : false, + "clientRole" : true, + "containerId" : "dada0ae8-ee9f-415a-9685-42da7c563660", + "attributes" : { } + }, { + "id" : "b7182885-9e57-445f-8dae-17c16eb31b5d", + "name" : "manage-identity-providers", + "description" : "${role_manage-identity-providers}", + "composite" : false, + "clientRole" : true, + "containerId" : "dada0ae8-ee9f-415a-9685-42da7c563660", + "attributes" : { } + }, { + "id" : "ba7bfe0c-cb07-4a47-b92c-b8132b57e181", + "name" : "manage-users", + "description" : "${role_manage-users}", + "composite" : false, + "clientRole" : true, + "containerId" : "dada0ae8-ee9f-415a-9685-42da7c563660", + "attributes" : { } + }, { + "id" : "13a8f0fc-647d-4bfe-b525-73956898e550", + "name" : "query-realms", + "description" : "${role_query-realms}", + "composite" : false, + "clientRole" : true, + "containerId" : "dada0ae8-ee9f-415a-9685-42da7c563660", + "attributes" : { } + }, { + "id" : "ef4c57dc-78c2-4f9a-8d2b-0e97d46fc842", + "name" : "view-realm", + "description" : "${role_view-realm}", + "composite" : false, + "clientRole" : true, + "containerId" : "dada0ae8-ee9f-415a-9685-42da7c563660", 
+ "attributes" : { } + }, { + "id" : "2875da34-006c-4b7f-bfc8-9ae8e46af3a2", + "name" : "view-users", + "description" : "${role_view-users}", + "composite" : true, + "composites" : { + "client" : { + "realm-management" : [ "query-users", "query-groups" ] + } + }, + "clientRole" : true, + "containerId" : "dada0ae8-ee9f-415a-9685-42da7c563660", + "attributes" : { } + }, { + "id" : "c8c8f7dc-876b-4263-806f-3329f7cd5fd3", + "name" : "create-client", + "description" : "${role_create-client}", + "composite" : false, + "clientRole" : true, + "containerId" : "dada0ae8-ee9f-415a-9685-42da7c563660", + "attributes" : { } + }, { + "id" : "21b84f90-5a9a-4845-a7ba-bbd98ac0fcc4", + "name" : "view-clients", + "description" : "${role_view-clients}", + "composite" : true, + "composites" : { + "client" : { + "realm-management" : [ "query-clients" ] + } + }, + "clientRole" : true, + "containerId" : "dada0ae8-ee9f-415a-9685-42da7c563660", + "attributes" : { } + }, { + "id" : "6fd64c94-d663-4501-ad77-0dcf8887d434", + "name" : "manage-events", + "description" : "${role_manage-events}", + "composite" : false, + "clientRole" : true, + "containerId" : "dada0ae8-ee9f-415a-9685-42da7c563660", + "attributes" : { } + }, { + "id" : "b321927a-023c-4d2a-99ad-24baf7ff6d83", + "name" : "query-clients", + "description" : "${role_query-clients}", + "composite" : false, + "clientRole" : true, + "containerId" : "dada0ae8-ee9f-415a-9685-42da7c563660", + "attributes" : { } + }, { + "id" : "2fc21160-78de-457b-8594-e5c76cde1d5e", + "name" : "view-events", + "description" : "${role_view-events}", + "composite" : false, + "clientRole" : true, + "containerId" : "dada0ae8-ee9f-415a-9685-42da7c563660", + "attributes" : { } + } ], + "test" : [ ], + "security-admin-console" : [ ], + "admin-cli" : [ ], + "account-console" : [ ], + "broker" : [ { + "id" : "07ee59b5-dca6-48fb-83d4-2994ef02850e", + "name" : "read-token", + "description" : "${role_read-token}", + "composite" : false, + "clientRole" : true, + "containerId" : "b57d62bb-77ff-42bd-b8ff-381c7288f327", + "attributes" : { } + } ], + "account" : [ { + "id" : "17d2f811-7bdf-4c73-83b4-1037001797b8", + "name" : "view-applications", + "description" : "${role_view-applications}", + "composite" : false, + "clientRole" : true, + "containerId" : "77f8127a-261e-4cd8-a77d-b74a389f7fd4", + "attributes" : { } + }, { + "id" : "d1ff44f9-419e-42fd-98e8-1add1169a972", + "name" : "delete-account", + "description" : "${role_delete-account}", + "composite" : false, + "clientRole" : true, + "containerId" : "77f8127a-261e-4cd8-a77d-b74a389f7fd4", + "attributes" : { } + }, { + "id" : "14c23a18-ae2d-43c9-b0c0-aaf6e0c7f5b0", + "name" : "manage-account-links", + "description" : "${role_manage-account-links}", + "composite" : false, + "clientRole" : true, + "containerId" : "77f8127a-261e-4cd8-a77d-b74a389f7fd4", + "attributes" : { } + }, { + "id" : "6fbe58af-d2fe-4d66-95fe-a2e8a818cb55", + "name" : "view-profile", + "description" : "${role_view-profile}", + "composite" : false, + "clientRole" : true, + "containerId" : "77f8127a-261e-4cd8-a77d-b74a389f7fd4", + "attributes" : { } + }, { + "id" : "bdfd02bc-6f6a-47d2-82bc-0ca52d78ff48", + "name" : "manage-consent", + "description" : "${role_manage-consent}", + "composite" : true, + "composites" : { + "client" : { + "account" : [ "view-consent" ] + } + }, + "clientRole" : true, + "containerId" : "77f8127a-261e-4cd8-a77d-b74a389f7fd4", + "attributes" : { } + }, { + "id" : "782f3b0c-a17b-4a87-988b-1a711401f3b0", + "name" : "manage-account", + "description" : 
"${role_manage-account}", + "composite" : true, + "composites" : { + "client" : { + "account" : [ "manage-account-links" ] + } + }, + "clientRole" : true, + "containerId" : "77f8127a-261e-4cd8-a77d-b74a389f7fd4", + "attributes" : { } + }, { + "id" : "8a3bfe15-66d9-4f3d-83ac-801d682d42b0", + "name" : "view-consent", + "description" : "${role_view-consent}", + "composite" : false, + "clientRole" : true, + "containerId" : "77f8127a-261e-4cd8-a77d-b74a389f7fd4", + "attributes" : { } + } ] + } + }, + "groups" : [ { + "id" : "d46f94c2-3b47-4288-b937-9cf918e54f0a", + "name" : "admins", + "path" : "/admins", + "attributes" : { }, + "realmRoles" : [ ], + "clientRoles" : { }, + "subGroups" : [ ] + }, { + "id" : "e992ce15-baac-48a0-8834-06f6fcf6c05b", + "name" : "curators", + "path" : "/curators", + "attributes" : { }, + "realmRoles" : [ ], + "clientRoles" : { }, + "subGroups" : [ ] + }, { + "id" : "531cf81d-a700-4336-808f-37a49709b48c", + "name" : "members", + "path" : "/members", + "attributes" : { }, + "realmRoles" : [ ], + "clientRoles" : { }, + "subGroups" : [ ] + } ], + "defaultRole" : { + "id" : "b4ff9091-ddf9-4536-b175-8cfa3e331d71", + "name" : "default-roles-test", + "description" : "${role_default-roles}", + "composite" : true, + "clientRole" : false, + "containerId" : "80a7e04b-a2b5-4891-a2d1-5ad4e915f983" + }, + "requiredCredentials" : [ "password" ], + "otpPolicyType" : "totp", + "otpPolicyAlgorithm" : "HmacSHA1", + "otpPolicyInitialCounter" : 0, + "otpPolicyDigits" : 6, + "otpPolicyLookAheadWindow" : 1, + "otpPolicyPeriod" : 30, + "otpSupportedApplications" : [ "FreeOTP", "Google Authenticator" ], + "webAuthnPolicyRpEntityName" : "keycloak", + "webAuthnPolicySignatureAlgorithms" : [ "ES256" ], + "webAuthnPolicyRpId" : "", + "webAuthnPolicyAttestationConveyancePreference" : "not specified", + "webAuthnPolicyAuthenticatorAttachment" : "not specified", + "webAuthnPolicyRequireResidentKey" : "not specified", + "webAuthnPolicyUserVerificationRequirement" : "not specified", + "webAuthnPolicyCreateTimeout" : 0, + "webAuthnPolicyAvoidSameAuthenticatorRegister" : false, + "webAuthnPolicyAcceptableAaguids" : [ ], + "webAuthnPolicyPasswordlessRpEntityName" : "keycloak", + "webAuthnPolicyPasswordlessSignatureAlgorithms" : [ "ES256" ], + "webAuthnPolicyPasswordlessRpId" : "", + "webAuthnPolicyPasswordlessAttestationConveyancePreference" : "not specified", + "webAuthnPolicyPasswordlessAuthenticatorAttachment" : "not specified", + "webAuthnPolicyPasswordlessRequireResidentKey" : "not specified", + "webAuthnPolicyPasswordlessUserVerificationRequirement" : "not specified", + "webAuthnPolicyPasswordlessCreateTimeout" : 0, + "webAuthnPolicyPasswordlessAvoidSameAuthenticatorRegister" : false, + "webAuthnPolicyPasswordlessAcceptableAaguids" : [ ], + "users" : [ { + "id" : "52cddd46-251c-4534-acc8-0580eeafb577", + "createdTimestamp" : 1684736014759, + "username" : "admin", + "enabled" : true, + "totp" : false, + "emailVerified" : true, + "firstName" : "Dataverse", + "lastName" : "Admin", + "email" : "dataverse-admin@mailinator.com", + "credentials" : [ { + "id" : "28f1ece7-26fb-40f1-9174-5ffce7b85c0a", + "type" : "password", + "userLabel" : "Set to \"admin\"", + "createdDate" : 1684736057302, + "secretData" : "{\"value\":\"ONI7fl6BmooVTUgwN1W3m7hsRjMAYEr2l+Fp5+7IOYw1iIntwvZ3U3W0ZBcCFJ7uhcKqF101+rueM3dZfoshPQ==\",\"salt\":\"Hj7co7zYVei7xwx8EaYP3A==\",\"additionalParameters\":{}}", + "credentialData" : "{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\",\"additionalParameters\":{}}" + } ], + 
"disableableCredentialTypes" : [ ], + "requiredActions" : [ ], + "realmRoles" : [ "default-roles-test" ], + "notBefore" : 0, + "groups" : [ "/admins" ] + }, { + "id" : "a3d8e76d-7e7b-42dc-bbd7-4258818a8a1b", + "createdTimestamp" : 1684755806552, + "username" : "affiliate", + "enabled" : true, + "totp" : false, + "emailVerified" : true, + "firstName" : "Dataverse", + "lastName" : "Affiliate", + "email" : "dataverse-affiliate@mailinator.com", + "credentials" : [ { + "id" : "31c8eb1e-b2a8-4f86-833b-7c0536cd61a1", + "type" : "password", + "userLabel" : "My password", + "createdDate" : 1684755821743, + "secretData" : "{\"value\":\"T+RQ4nvmjknj7ds8NU7782j6PJ++uCu98zNoDQjIe9IKXah+13q4EcXO9IHmi2BJ7lgT0OIzwIoac4JEQLxhjQ==\",\"salt\":\"fnRmE9WmjAp4tlvGh/bxxQ==\",\"additionalParameters\":{}}", + "credentialData" : "{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\",\"additionalParameters\":{}}" + } ], + "disableableCredentialTypes" : [ ], + "requiredActions" : [ ], + "realmRoles" : [ "default-roles-test" ], + "notBefore" : 0, + "groups" : [ ] + }, { + "id" : "e5531496-cfb8-498c-a902-50c98d649e79", + "createdTimestamp" : 1684755721064, + "username" : "curator", + "enabled" : true, + "totp" : false, + "emailVerified" : true, + "firstName" : "Dataverse", + "lastName" : "Curator", + "email" : "dataverse-curator@mailinator.com", + "credentials" : [ { + "id" : "664546b4-b936-45cf-a4cf-5e98b743fc7f", + "type" : "password", + "userLabel" : "My password", + "createdDate" : 1684755740776, + "secretData" : "{\"value\":\"AvVqybCNtCBVAdLEeJKresy9tc3c4BBUQvu5uHVQw4IjVagN6FpKGlDEKOrxhzdSM8skEvthOEqJkloPo1w+NQ==\",\"salt\":\"2em2DDRRlNEYsNR3xDqehw==\",\"additionalParameters\":{}}", + "credentialData" : "{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\",\"additionalParameters\":{}}" + } ], + "disableableCredentialTypes" : [ ], + "requiredActions" : [ ], + "realmRoles" : [ "default-roles-test" ], + "notBefore" : 0, + "groups" : [ "/curators" ] + }, { + "id" : "c0082e7e-a3e9-45e6-95e9-811a34adce9d", + "createdTimestamp" : 1684755585802, + "username" : "user", + "enabled" : true, + "totp" : false, + "emailVerified" : true, + "firstName" : "Dataverse", + "lastName" : "User", + "email" : "dataverse-user@mailinator.com", + "credentials" : [ { + "id" : "00d6d67f-2e30-4da6-a567-bec38a1886a0", + "type" : "password", + "userLabel" : "My password", + "createdDate" : 1684755599597, + "secretData" : "{\"value\":\"z991rnjznAgosi5nX962HjM8/gN5GLJTdrlvi6G9cj8470X2/oZUb4Lka6s8xImgtEloCgWiKqH0EH9G4Y3a5A==\",\"salt\":\"/Uz7w+2IqDo+fQUGqxjVHw==\",\"additionalParameters\":{}}", + "credentialData" : "{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\",\"additionalParameters\":{}}" + } ], + "disableableCredentialTypes" : [ ], + "requiredActions" : [ ], + "realmRoles" : [ "default-roles-test" ], + "notBefore" : 0, + "groups" : [ "/members" ] + } ], + "scopeMappings" : [ { + "clientScope" : "offline_access", + "roles" : [ "offline_access" ] + } ], + "clientScopeMappings" : { + "account" : [ { + "client" : "account-console", + "roles" : [ "manage-account" ] + } ] + }, + "clients" : [ { + "id" : "77f8127a-261e-4cd8-a77d-b74a389f7fd4", + "clientId" : "account", + "name" : "${client_account}", + "rootUrl" : "${authBaseUrl}", + "baseUrl" : "/realms/test/account/", + "surrogateAuthRequired" : false, + "enabled" : true, + "alwaysDisplayInConsole" : false, + "clientAuthenticatorType" : "client-secret", + "redirectUris" : [ "/realms/test/account/*" ], + "webOrigins" : [ ], + "notBefore" : 0, + "bearerOnly" : false, + 
"consentRequired" : false, + "standardFlowEnabled" : true, + "implicitFlowEnabled" : false, + "directAccessGrantsEnabled" : false, + "serviceAccountsEnabled" : false, + "publicClient" : true, + "frontchannelLogout" : false, + "protocol" : "openid-connect", + "attributes" : { + "post.logout.redirect.uris" : "+" + }, + "authenticationFlowBindingOverrides" : { }, + "fullScopeAllowed" : false, + "nodeReRegistrationTimeout" : 0, + "defaultClientScopes" : [ "web-origins", "acr", "roles", "profile", "email" ], + "optionalClientScopes" : [ "address", "phone", "offline_access", "microprofile-jwt" ] + }, { + "id" : "5d99f721-027c-478d-867d-61114e0a8192", + "clientId" : "account-console", + "name" : "${client_account-console}", + "rootUrl" : "${authBaseUrl}", + "baseUrl" : "/realms/test/account/", + "surrogateAuthRequired" : false, + "enabled" : true, + "alwaysDisplayInConsole" : false, + "clientAuthenticatorType" : "client-secret", + "redirectUris" : [ "/realms/test/account/*" ], + "webOrigins" : [ ], + "notBefore" : 0, + "bearerOnly" : false, + "consentRequired" : false, + "standardFlowEnabled" : true, + "implicitFlowEnabled" : false, + "directAccessGrantsEnabled" : false, + "serviceAccountsEnabled" : false, + "publicClient" : true, + "frontchannelLogout" : false, + "protocol" : "openid-connect", + "attributes" : { + "post.logout.redirect.uris" : "+", + "pkce.code.challenge.method" : "S256" + }, + "authenticationFlowBindingOverrides" : { }, + "fullScopeAllowed" : false, + "nodeReRegistrationTimeout" : 0, + "protocolMappers" : [ { + "id" : "e181a0ce-9a04-4468-a38a-aaef9f78f989", + "name" : "audience resolve", + "protocol" : "openid-connect", + "protocolMapper" : "oidc-audience-resolve-mapper", + "consentRequired" : false, + "config" : { } + } ], + "defaultClientScopes" : [ "web-origins", "acr", "roles", "profile", "email" ], + "optionalClientScopes" : [ "address", "phone", "offline_access", "microprofile-jwt" ] + }, { + "id" : "5eccc178-121e-4d0f-bcb2-04ae3c2e52ed", + "clientId" : "admin-cli", + "name" : "${client_admin-cli}", + "surrogateAuthRequired" : false, + "enabled" : true, + "alwaysDisplayInConsole" : false, + "clientAuthenticatorType" : "client-secret", + "redirectUris" : [ ], + "webOrigins" : [ ], + "notBefore" : 0, + "bearerOnly" : false, + "consentRequired" : false, + "standardFlowEnabled" : false, + "implicitFlowEnabled" : false, + "directAccessGrantsEnabled" : true, + "serviceAccountsEnabled" : false, + "publicClient" : true, + "frontchannelLogout" : false, + "protocol" : "openid-connect", + "attributes" : { }, + "authenticationFlowBindingOverrides" : { }, + "fullScopeAllowed" : false, + "nodeReRegistrationTimeout" : 0, + "defaultClientScopes" : [ "web-origins", "acr", "roles", "profile", "email" ], + "optionalClientScopes" : [ "address", "phone", "offline_access", "microprofile-jwt" ] + }, { + "id" : "b57d62bb-77ff-42bd-b8ff-381c7288f327", + "clientId" : "broker", + "name" : "${client_broker}", + "surrogateAuthRequired" : false, + "enabled" : true, + "alwaysDisplayInConsole" : false, + "clientAuthenticatorType" : "client-secret", + "redirectUris" : [ ], + "webOrigins" : [ ], + "notBefore" : 0, + "bearerOnly" : true, + "consentRequired" : false, + "standardFlowEnabled" : true, + "implicitFlowEnabled" : false, + "directAccessGrantsEnabled" : false, + "serviceAccountsEnabled" : false, + "publicClient" : false, + "frontchannelLogout" : false, + "protocol" : "openid-connect", + "attributes" : { }, + "authenticationFlowBindingOverrides" : { }, + "fullScopeAllowed" : false, + 
"nodeReRegistrationTimeout" : 0, + "defaultClientScopes" : [ "web-origins", "acr", "roles", "profile", "email" ], + "optionalClientScopes" : [ "address", "phone", "offline_access", "microprofile-jwt" ] + }, { + "id" : "dada0ae8-ee9f-415a-9685-42da7c563660", + "clientId" : "realm-management", + "name" : "${client_realm-management}", + "surrogateAuthRequired" : false, + "enabled" : true, + "alwaysDisplayInConsole" : false, + "clientAuthenticatorType" : "client-secret", + "redirectUris" : [ ], + "webOrigins" : [ ], + "notBefore" : 0, + "bearerOnly" : true, + "consentRequired" : false, + "standardFlowEnabled" : true, + "implicitFlowEnabled" : false, + "directAccessGrantsEnabled" : false, + "serviceAccountsEnabled" : false, + "publicClient" : false, + "frontchannelLogout" : false, + "protocol" : "openid-connect", + "attributes" : { }, + "authenticationFlowBindingOverrides" : { }, + "fullScopeAllowed" : false, + "nodeReRegistrationTimeout" : 0, + "defaultClientScopes" : [ "web-origins", "acr", "roles", "profile", "email" ], + "optionalClientScopes" : [ "address", "phone", "offline_access", "microprofile-jwt" ] + }, { + "id" : "bf7cf550-3875-4f97-9878-b2419a854058", + "clientId" : "security-admin-console", + "name" : "${client_security-admin-console}", + "rootUrl" : "${authAdminUrl}", + "baseUrl" : "/admin/test/console/", + "surrogateAuthRequired" : false, + "enabled" : true, + "alwaysDisplayInConsole" : false, + "clientAuthenticatorType" : "client-secret", + "redirectUris" : [ "/admin/test/console/*" ], + "webOrigins" : [ "+" ], + "notBefore" : 0, + "bearerOnly" : false, + "consentRequired" : false, + "standardFlowEnabled" : true, + "implicitFlowEnabled" : false, + "directAccessGrantsEnabled" : false, + "serviceAccountsEnabled" : false, + "publicClient" : true, + "frontchannelLogout" : false, + "protocol" : "openid-connect", + "attributes" : { + "post.logout.redirect.uris" : "+", + "pkce.code.challenge.method" : "S256" + }, + "authenticationFlowBindingOverrides" : { }, + "fullScopeAllowed" : false, + "nodeReRegistrationTimeout" : 0, + "protocolMappers" : [ { + "id" : "ff845e16-e200-4894-ab51-37d8b9f2a445", + "name" : "locale", + "protocol" : "openid-connect", + "protocolMapper" : "oidc-usermodel-attribute-mapper", + "consentRequired" : false, + "config" : { + "userinfo.token.claim" : "true", + "user.attribute" : "locale", + "id.token.claim" : "true", + "access.token.claim" : "true", + "claim.name" : "locale", + "jsonType.label" : "String" + } + } ], + "defaultClientScopes" : [ "web-origins", "acr", "roles", "profile", "email" ], + "optionalClientScopes" : [ "address", "phone", "offline_access", "microprofile-jwt" ] + }, { + "id" : "9c27faa8-4b8d-4ad9-9cd1-880032ef06aa", + "clientId" : "test", + "name" : "A Test Client", + "description" : "Use for hacking and testing away a confidential client", + "rootUrl" : "", + "adminUrl" : "", + "baseUrl" : "", + "surrogateAuthRequired" : false, + "enabled" : true, + "alwaysDisplayInConsole" : false, + "clientAuthenticatorType" : "client-secret", + "secret" : "94XHrfNRwXsjqTqApRrwWmhDLDHpIYV8", + "redirectUris" : [ "*" ], + "webOrigins" : [ ], + "notBefore" : 0, + "bearerOnly" : false, + "consentRequired" : false, + "standardFlowEnabled" : true, + "implicitFlowEnabled" : false, + "directAccessGrantsEnabled" : true, + "serviceAccountsEnabled" : false, + "publicClient" : false, + "frontchannelLogout" : true, + "protocol" : "openid-connect", + "attributes" : { + "oidc.ciba.grant.enabled" : "false", + "client.secret.creation.time" : "1684735831", + 
"backchannel.logout.session.required" : "true", + "display.on.consent.screen" : "false", + "oauth2.device.authorization.grant.enabled" : "false", + "backchannel.logout.revoke.offline.tokens" : "false" + }, + "authenticationFlowBindingOverrides" : { }, + "fullScopeAllowed" : true, + "nodeReRegistrationTimeout" : -1, + "defaultClientScopes" : [ "web-origins", "acr", "roles", "profile", "email" ], + "optionalClientScopes" : [ "address", "phone", "offline_access", "microprofile-jwt" ] + } ], + "clientScopes" : [ { + "id" : "72f29e57-92fa-437b-828c-2b9d6fe56192", + "name" : "address", + "description" : "OpenID Connect built-in scope: address", + "protocol" : "openid-connect", + "attributes" : { + "include.in.token.scope" : "true", + "display.on.consent.screen" : "true", + "consent.screen.text" : "${addressScopeConsentText}" + }, + "protocolMappers" : [ { + "id" : "59581aea-70d6-4ee8-bec2-1fea5fc497ae", + "name" : "address", + "protocol" : "openid-connect", + "protocolMapper" : "oidc-address-mapper", + "consentRequired" : false, + "config" : { + "user.attribute.formatted" : "formatted", + "user.attribute.country" : "country", + "user.attribute.postal_code" : "postal_code", + "userinfo.token.claim" : "true", + "user.attribute.street" : "street", + "id.token.claim" : "true", + "user.attribute.region" : "region", + "access.token.claim" : "true", + "user.attribute.locality" : "locality" + } + } ] + }, { + "id" : "f515ec81-3c1b-4d4d-b7a2-e7e8d47b6447", + "name" : "roles", + "description" : "OpenID Connect scope for add user roles to the access token", + "protocol" : "openid-connect", + "attributes" : { + "include.in.token.scope" : "false", + "display.on.consent.screen" : "true", + "consent.screen.text" : "${rolesScopeConsentText}" + }, + "protocolMappers" : [ { + "id" : "26d299a8-69e2-4864-9595-17a5b417fc61", + "name" : "realm roles", + "protocol" : "openid-connect", + "protocolMapper" : "oidc-usermodel-realm-role-mapper", + "consentRequired" : false, + "config" : { + "user.attribute" : "foo", + "access.token.claim" : "true", + "claim.name" : "realm_access.roles", + "jsonType.label" : "String", + "multivalued" : "true" + } + }, { + "id" : "d2998083-a8db-4f4e-9aaa-9cad68d65b97", + "name" : "audience resolve", + "protocol" : "openid-connect", + "protocolMapper" : "oidc-audience-resolve-mapper", + "consentRequired" : false, + "config" : { } + }, { + "id" : "7a4cb2e5-07a0-4c16-a024-71df7ddd6868", + "name" : "client roles", + "protocol" : "openid-connect", + "protocolMapper" : "oidc-usermodel-client-role-mapper", + "consentRequired" : false, + "config" : { + "user.attribute" : "foo", + "access.token.claim" : "true", + "claim.name" : "resource_access.${client_id}.roles", + "jsonType.label" : "String", + "multivalued" : "true" + } + } ] + }, { + "id" : "8f1eafef-92d6-434e-b9ec-6edec1fddd0a", + "name" : "offline_access", + "description" : "OpenID Connect built-in scope: offline_access", + "protocol" : "openid-connect", + "attributes" : { + "consent.screen.text" : "${offlineAccessScopeConsentText}", + "display.on.consent.screen" : "true" + } + }, { + "id" : "c03095aa-b656-447a-9767-0763c2ccb070", + "name" : "acr", + "description" : "OpenID Connect scope for add acr (authentication context class reference) to the token", + "protocol" : "openid-connect", + "attributes" : { + "include.in.token.scope" : "false", + "display.on.consent.screen" : "false" + }, + "protocolMappers" : [ { + "id" : "948b230c-56d0-4000-937c-841cd395d3f9", + "name" : "acr loa level", + "protocol" : "openid-connect", + "protocolMapper" : 
"oidc-acr-mapper", + "consentRequired" : false, + "config" : { + "id.token.claim" : "true", + "access.token.claim" : "true" + } + } ] + }, { + "id" : "cdf35f63-8ec7-41a0-ae12-f05d415818cc", + "name" : "phone", + "description" : "OpenID Connect built-in scope: phone", + "protocol" : "openid-connect", + "attributes" : { + "include.in.token.scope" : "true", + "display.on.consent.screen" : "true", + "consent.screen.text" : "${phoneScopeConsentText}" + }, + "protocolMappers" : [ { + "id" : "ba4348ff-90b1-4e09-89a8-e5c08b04d3d1", + "name" : "phone number", + "protocol" : "openid-connect", + "protocolMapper" : "oidc-usermodel-attribute-mapper", + "consentRequired" : false, + "config" : { + "userinfo.token.claim" : "true", + "user.attribute" : "phoneNumber", + "id.token.claim" : "true", + "access.token.claim" : "true", + "claim.name" : "phone_number", + "jsonType.label" : "String" + } + }, { + "id" : "e6cceae5-8392-4348-b302-f610ece6056e", + "name" : "phone number verified", + "protocol" : "openid-connect", + "protocolMapper" : "oidc-usermodel-attribute-mapper", + "consentRequired" : false, + "config" : { + "userinfo.token.claim" : "true", + "user.attribute" : "phoneNumberVerified", + "id.token.claim" : "true", + "access.token.claim" : "true", + "claim.name" : "phone_number_verified", + "jsonType.label" : "boolean" + } + } ] + }, { + "id" : "4318001c-2970-41d3-91b9-e31c08569872", + "name" : "email", + "description" : "OpenID Connect built-in scope: email", + "protocol" : "openid-connect", + "attributes" : { + "include.in.token.scope" : "true", + "display.on.consent.screen" : "true", + "consent.screen.text" : "${emailScopeConsentText}" + }, + "protocolMappers" : [ { + "id" : "406d02a6-866a-4962-8838-e8c58ada1505", + "name" : "email", + "protocol" : "openid-connect", + "protocolMapper" : "oidc-usermodel-property-mapper", + "consentRequired" : false, + "config" : { + "userinfo.token.claim" : "true", + "user.attribute" : "email", + "id.token.claim" : "true", + "access.token.claim" : "true", + "claim.name" : "email", + "jsonType.label" : "String" + } + }, { + "id" : "33baabc1-9bf2-42e4-8b8e-a53c13f0b744", + "name" : "email verified", + "protocol" : "openid-connect", + "protocolMapper" : "oidc-usermodel-property-mapper", + "consentRequired" : false, + "config" : { + "userinfo.token.claim" : "true", + "user.attribute" : "emailVerified", + "id.token.claim" : "true", + "access.token.claim" : "true", + "claim.name" : "email_verified", + "jsonType.label" : "boolean" + } + } ] + }, { + "id" : "5277a84f-d727-4c64-8432-d513127beee1", + "name" : "profile", + "description" : "OpenID Connect built-in scope: profile", + "protocol" : "openid-connect", + "attributes" : { + "include.in.token.scope" : "true", + "display.on.consent.screen" : "true", + "consent.screen.text" : "${profileScopeConsentText}" + }, + "protocolMappers" : [ { + "id" : "0a609875-2678-4056-93ef-dd5c03e6059d", + "name" : "given name", + "protocol" : "openid-connect", + "protocolMapper" : "oidc-usermodel-property-mapper", + "consentRequired" : false, + "config" : { + "userinfo.token.claim" : "true", + "user.attribute" : "firstName", + "id.token.claim" : "true", + "access.token.claim" : "true", + "claim.name" : "given_name", + "jsonType.label" : "String" + } + }, { + "id" : "7c510d18-07ee-4b78-8acd-24b777d11b3c", + "name" : "website", + "protocol" : "openid-connect", + "protocolMapper" : "oidc-usermodel-attribute-mapper", + "consentRequired" : false, + "config" : { + "userinfo.token.claim" : "true", + "user.attribute" : "website", + "id.token.claim" 
: "true", + "access.token.claim" : "true", + "claim.name" : "website", + "jsonType.label" : "String" + } + }, { + "id" : "0bb6d0ea-195f-49e8-918c-c419a26a661c", + "name" : "username", + "protocol" : "openid-connect", + "protocolMapper" : "oidc-usermodel-property-mapper", + "consentRequired" : false, + "config" : { + "userinfo.token.claim" : "true", + "user.attribute" : "username", + "id.token.claim" : "true", + "access.token.claim" : "true", + "claim.name" : "preferred_username", + "jsonType.label" : "String" + } + }, { + "id" : "5f1e644c-1acf-440c-b1a6-b5f65bcebfd9", + "name" : "profile", + "protocol" : "openid-connect", + "protocolMapper" : "oidc-usermodel-attribute-mapper", + "consentRequired" : false, + "config" : { + "userinfo.token.claim" : "true", + "user.attribute" : "profile", + "id.token.claim" : "true", + "access.token.claim" : "true", + "claim.name" : "profile", + "jsonType.label" : "String" + } + }, { + "id" : "c710bdb2-6cfd-4f60-9c4e-730188fc62f7", + "name" : "family name", + "protocol" : "openid-connect", + "protocolMapper" : "oidc-usermodel-property-mapper", + "consentRequired" : false, + "config" : { + "userinfo.token.claim" : "true", + "user.attribute" : "lastName", + "id.token.claim" : "true", + "access.token.claim" : "true", + "claim.name" : "family_name", + "jsonType.label" : "String" + } + }, { + "id" : "012d5038-0e13-42ba-9df7-2487c8e2eead", + "name" : "nickname", + "protocol" : "openid-connect", + "protocolMapper" : "oidc-usermodel-attribute-mapper", + "consentRequired" : false, + "config" : { + "userinfo.token.claim" : "true", + "user.attribute" : "nickname", + "id.token.claim" : "true", + "access.token.claim" : "true", + "claim.name" : "nickname", + "jsonType.label" : "String" + } + }, { + "id" : "21590b19-517d-4b6d-92f6-d4f71238677e", + "name" : "updated at", + "protocol" : "openid-connect", + "protocolMapper" : "oidc-usermodel-attribute-mapper", + "consentRequired" : false, + "config" : { + "userinfo.token.claim" : "true", + "user.attribute" : "updatedAt", + "id.token.claim" : "true", + "access.token.claim" : "true", + "claim.name" : "updated_at", + "jsonType.label" : "long" + } + }, { + "id" : "e4cddca7-1360-42f3-9854-da6cbe00c71e", + "name" : "birthdate", + "protocol" : "openid-connect", + "protocolMapper" : "oidc-usermodel-attribute-mapper", + "consentRequired" : false, + "config" : { + "userinfo.token.claim" : "true", + "user.attribute" : "birthdate", + "id.token.claim" : "true", + "access.token.claim" : "true", + "claim.name" : "birthdate", + "jsonType.label" : "String" + } + }, { + "id" : "afee328f-c64c-43e6-80d0-be2721c2ed0e", + "name" : "locale", + "protocol" : "openid-connect", + "protocolMapper" : "oidc-usermodel-attribute-mapper", + "consentRequired" : false, + "config" : { + "userinfo.token.claim" : "true", + "user.attribute" : "locale", + "id.token.claim" : "true", + "access.token.claim" : "true", + "claim.name" : "locale", + "jsonType.label" : "String" + } + }, { + "id" : "780a1e2c-5b63-46f4-a5bf-dc3fd8ce0cbb", + "name" : "full name", + "protocol" : "openid-connect", + "protocolMapper" : "oidc-full-name-mapper", + "consentRequired" : false, + "config" : { + "id.token.claim" : "true", + "access.token.claim" : "true", + "userinfo.token.claim" : "true" + } + }, { + "id" : "aeebffff-f776-427e-83ed-064707ffce57", + "name" : "zoneinfo", + "protocol" : "openid-connect", + "protocolMapper" : "oidc-usermodel-attribute-mapper", + "consentRequired" : false, + "config" : { + "userinfo.token.claim" : "true", + "user.attribute" : "zoneinfo", + "id.token.claim" : 
"true", + "access.token.claim" : "true", + "claim.name" : "zoneinfo", + "jsonType.label" : "String" + } + }, { + "id" : "b3e840a2-1794-4da1-bf69-31905cbff0d6", + "name" : "middle name", + "protocol" : "openid-connect", + "protocolMapper" : "oidc-usermodel-attribute-mapper", + "consentRequired" : false, + "config" : { + "userinfo.token.claim" : "true", + "user.attribute" : "middleName", + "id.token.claim" : "true", + "access.token.claim" : "true", + "claim.name" : "middle_name", + "jsonType.label" : "String" + } + }, { + "id" : "0607e0e4-4f7f-4214-996d-3599772ce1c7", + "name" : "picture", + "protocol" : "openid-connect", + "protocolMapper" : "oidc-usermodel-attribute-mapper", + "consentRequired" : false, + "config" : { + "userinfo.token.claim" : "true", + "user.attribute" : "picture", + "id.token.claim" : "true", + "access.token.claim" : "true", + "claim.name" : "picture", + "jsonType.label" : "String" + } + }, { + "id" : "426a609b-4e28-4132-af0d-13297b8cb63a", + "name" : "gender", + "protocol" : "openid-connect", + "protocolMapper" : "oidc-usermodel-attribute-mapper", + "consentRequired" : false, + "config" : { + "userinfo.token.claim" : "true", + "user.attribute" : "gender", + "id.token.claim" : "true", + "access.token.claim" : "true", + "claim.name" : "gender", + "jsonType.label" : "String" + } + } ] + }, { + "id" : "a1ebde82-ce21-438f-a3ad-261d3eeb1c01", + "name" : "role_list", + "description" : "SAML role list", + "protocol" : "saml", + "attributes" : { + "consent.screen.text" : "${samlRoleListScopeConsentText}", + "display.on.consent.screen" : "true" + }, + "protocolMappers" : [ { + "id" : "64653ac7-7ffc-4f7c-a589-03e3b68bbd25", + "name" : "role list", + "protocol" : "saml", + "protocolMapper" : "saml-role-list-mapper", + "consentRequired" : false, + "config" : { + "single" : "false", + "attribute.nameformat" : "Basic", + "attribute.name" : "Role" + } + } ] + }, { + "id" : "aeb5b852-dfec-4e67-9d9e-104abe9b3bf2", + "name" : "web-origins", + "description" : "OpenID Connect scope for add allowed web origins to the access token", + "protocol" : "openid-connect", + "attributes" : { + "include.in.token.scope" : "false", + "display.on.consent.screen" : "false", + "consent.screen.text" : "" + }, + "protocolMappers" : [ { + "id" : "e2fa8437-a0f1-46fc-af9c-c40fc09cd6a1", + "name" : "allowed web origins", + "protocol" : "openid-connect", + "protocolMapper" : "oidc-allowed-origins-mapper", + "consentRequired" : false, + "config" : { } + } ] + }, { + "id" : "4fecd0d7-d4ad-457e-90f2-c7202bf01ff5", + "name" : "microprofile-jwt", + "description" : "Microprofile - JWT built-in scope", + "protocol" : "openid-connect", + "attributes" : { + "include.in.token.scope" : "true", + "display.on.consent.screen" : "false" + }, + "protocolMappers" : [ { + "id" : "a9536634-a9f6-4ed5-a8e7-8379d3b002ca", + "name" : "upn", + "protocol" : "openid-connect", + "protocolMapper" : "oidc-usermodel-property-mapper", + "consentRequired" : false, + "config" : { + "userinfo.token.claim" : "true", + "user.attribute" : "username", + "id.token.claim" : "true", + "access.token.claim" : "true", + "claim.name" : "upn", + "jsonType.label" : "String" + } + }, { + "id" : "2ce1a702-9458-4926-9b8a-f82c07215755", + "name" : "groups", + "protocol" : "openid-connect", + "protocolMapper" : "oidc-usermodel-realm-role-mapper", + "consentRequired" : false, + "config" : { + "multivalued" : "true", + "user.attribute" : "foo", + "id.token.claim" : "true", + "access.token.claim" : "true", + "claim.name" : "groups", + "jsonType.label" : "String" + } 
+ } ] + } ], + "defaultDefaultClientScopes" : [ "role_list", "profile", "email", "roles", "web-origins", "acr" ], + "defaultOptionalClientScopes" : [ "offline_access", "address", "phone", "microprofile-jwt" ], + "browserSecurityHeaders" : { + "contentSecurityPolicyReportOnly" : "", + "xContentTypeOptions" : "nosniff", + "xRobotsTag" : "none", + "xFrameOptions" : "SAMEORIGIN", + "contentSecurityPolicy" : "frame-src 'self'; frame-ancestors 'self'; object-src 'none';", + "xXSSProtection" : "1; mode=block", + "strictTransportSecurity" : "max-age=31536000; includeSubDomains" + }, + "smtpServer" : { }, + "eventsEnabled" : false, + "eventsListeners" : [ "jboss-logging" ], + "enabledEventTypes" : [ ], + "adminEventsEnabled" : false, + "adminEventsDetailsEnabled" : false, + "identityProviders" : [ ], + "identityProviderMappers" : [ ], + "components" : { + "org.keycloak.services.clientregistration.policy.ClientRegistrationPolicy" : [ { + "id" : "8115796f-8f1f-4d6a-88f8-ca2938451260", + "name" : "Allowed Client Scopes", + "providerId" : "allowed-client-templates", + "subType" : "authenticated", + "subComponents" : { }, + "config" : { + "allow-default-scopes" : [ "true" ] + } + }, { + "id" : "044bd055-714d-478e-aa93-303d2161c427", + "name" : "Allowed Protocol Mapper Types", + "providerId" : "allowed-protocol-mappers", + "subType" : "authenticated", + "subComponents" : { }, + "config" : { + "allowed-protocol-mapper-types" : [ "saml-user-property-mapper", "oidc-address-mapper", "oidc-sha256-pairwise-sub-mapper", "saml-role-list-mapper", "saml-user-attribute-mapper", "oidc-usermodel-property-mapper", "oidc-usermodel-attribute-mapper", "oidc-full-name-mapper" ] + } + }, { + "id" : "be465734-3b0f-4370-a144-73db756e23f8", + "name" : "Allowed Protocol Mapper Types", + "providerId" : "allowed-protocol-mappers", + "subType" : "anonymous", + "subComponents" : { }, + "config" : { + "allowed-protocol-mapper-types" : [ "oidc-usermodel-attribute-mapper", "saml-user-property-mapper", "oidc-address-mapper", "oidc-sha256-pairwise-sub-mapper", "saml-user-attribute-mapper", "oidc-full-name-mapper", "oidc-usermodel-property-mapper", "saml-role-list-mapper" ] + } + }, { + "id" : "42a2f64d-ac9e-4221-9cf6-40ff8c868629", + "name" : "Trusted Hosts", + "providerId" : "trusted-hosts", + "subType" : "anonymous", + "subComponents" : { }, + "config" : { + "host-sending-registration-request-must-match" : [ "true" ], + "client-uris-must-match" : [ "true" ] + } + }, { + "id" : "7ca08915-6c33-454c-88f2-20e1d6553b26", + "name" : "Max Clients Limit", + "providerId" : "max-clients", + "subType" : "anonymous", + "subComponents" : { }, + "config" : { + "max-clients" : [ "200" ] + } + }, { + "id" : "f01f2b6f-3f01-4d01-b2f4-70577c6f599c", + "name" : "Allowed Client Scopes", + "providerId" : "allowed-client-templates", + "subType" : "anonymous", + "subComponents" : { }, + "config" : { + "allow-default-scopes" : [ "true" ] + } + }, { + "id" : "516d7f21-f21a-4690-831e-36ad313093b2", + "name" : "Consent Required", + "providerId" : "consent-required", + "subType" : "anonymous", + "subComponents" : { }, + "config" : { } + }, { + "id" : "c79df6a0-d4d8-4866-b9e6-8ddb5d1bd38e", + "name" : "Full Scope Disabled", + "providerId" : "scope", + "subType" : "anonymous", + "subComponents" : { }, + "config" : { } + } ], + "org.keycloak.userprofile.UserProfileProvider" : [ { + "id" : "cf47a21f-c8fb-42f2-9bff-feca967db183", + "providerId" : "declarative-user-profile", + "subComponents" : { }, + "config" : { } + } ], + "org.keycloak.keys.KeyProvider" : [ { + 
"id" : "6b4a2281-a9e8-43ab-aee7-190ae91b2842", + "name" : "aes-generated", + "providerId" : "aes-generated", + "subComponents" : { }, + "config" : { + "kid" : [ "47b9c2c2-32dc-4317-bd8b-1c4e5bb740ca" ], + "secret" : [ "9VWsVSqbj5zWa8Mq-rRzOw" ], + "priority" : [ "100" ] + } + }, { + "id" : "68e2d2b0-4976-480f-ab76-f84a17686b05", + "name" : "rsa-enc-generated", + "providerId" : "rsa-enc-generated", + "subComponents" : { }, + "config" : { + "privateKey" : [ "MIIEpQIBAAKCAQEAwuIcVVJDncorsQcFef4M/J9dsaNNmwEv/+4pCSZuco7IlA9uCfvwjYgfwQlWoCHCc7JFEtUOXhpLNR0SJ9w2eCC9A/0horjLmiVGU5sGACGrAxSgipt399k83mtkPBTikT1BXumPrX51ovdEPVPQSO0hIBwFn4ZDwA9P/00jNzzswyLC2UDdQrwIjm2xWjq1X82d8mL3+Yp8lF9qD1w305+XPiqCC+TUunKsuCQq5sddet+UoCDsFQyxsJi6cWJrryDvQmiDgM2wm68jn6hyzDE76J1az0wKEGqoMEwIy0juqZCyAqgsm3xA+zHpTcI3EyTwDGpMvWNJp8AWqXPNaQIDAQABAoIBAAethL1+n/6WpUBEaoHcVrq5/2+vo0+dfTyVZNKRFqtG0WOWPzOflFd1HZV7YVPuJI+uPi8ANmsnbh9YcaYg9JiTZ0hMZ++giBf0ID2hZxv995NyXnf7fkoFKghevYG+9mVPtHRmxKlKiPFWfHQjP1ACNKAD2UZdcdbzxicaIkPV/hP996mZA3xaaudggAJq7u/W67H2Q6ofGqW4TI5241d8T+6yobbvXRe4n8FKz4eK2aZv+N+zwh5JDMsJ8050+lCDsyoyakEPf+4veuPkewx4FemAiotDNcmoUQSDL26wLw8kk1uZ9JY0M88OL5pMyBuxTqy0F6BWBltq80mlefECgYEA4vZ8Agu2plXOzWASn0dyhCel3QoeUqNY8D8A+0vK9qWxUE9jMG13jAZmsL2I38SuwRN1DhJezbrn4QTuxTukxgSjLDv/pBp9UnXnCz/fg4yPTYsZ0zHqTMbwvdtfIzBHTCYyIJ+unxVYoenC0XZKSQXA3NN2zNqYpLhjStWdEZECgYEA29DznJxpDZsRUieRxFgZ+eRCjbQ9Q2A46preqMo1KOZ6bt9avxG3uM7pUC+UOeIizeRzxPSJ2SyptYPzdaNwKN3Lq+RhjHe1zYLngXb0CIQaRwNHqePxXF1sg0dTbmcxf+Co7yPG+Nd5nrQq9SQHC3tLTyL6x3VU/yAfMQqUklkCgYEAyVl8iGAV6RkE/4R04OOEv6Ng7WkVn6CUvYZXe5kw9YHnfWUAjS0AOrRPFAsBy+r0UgvN8+7uNjvTjPhQT5/rPVVN4WdVEyQA/E/m6j7/LvhbBaMbBRcqUnTHjNd6XoBtMCxOmkyvoShR2krE8AiuPHwjLoVXxsNDWhbO18wMrVECgYEAlmkICOXNzI2K8Jg62gse2yshjy0BrpSs3XtTWFPkxDPRGwSiZ5OMD10lsMSdvG3MOu5TeTWLDZvOFHJRqPFI0e3Sa7A+P4u6TwF/v8rRePJLuMO5ybo7cWRL2Bh6MlVSPZpQfjIQ+D0Y70uBCXS5jVW0VlYtG0Zh/qDQNxJyTyECgYEAuRINlZ0ag+1QTITapSatbFWd/KquGLpMjZyF4k5gVHs+4zHnnTi1YIDUInp1FJBqKD27z2byy7KFgbMBZQmsDs8i4fgzQrJHe3D4WFFHCjiClbeReejbas9bOnqhSQCiIy1Ck8vMAriAtctSA/g/qq6dQApSgcWaKvTVL2Ywa7E=" ], + "keyUse" : [ "ENC" ], + "certificate" : [ "MIIClzCCAX8CBgGIQhOIijANBgkqhkiG9w0BAQsFADAPMQ0wCwYDVQQDDAR0ZXN0MB4XDTIzMDUyMjA2MDczNloXDTMzMDUyMjA2MDkxNlowDzENMAsGA1UEAwwEdGVzdDCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAMLiHFVSQ53KK7EHBXn+DPyfXbGjTZsBL//uKQkmbnKOyJQPbgn78I2IH8EJVqAhwnOyRRLVDl4aSzUdEifcNnggvQP9IaK4y5olRlObBgAhqwMUoIqbd/fZPN5rZDwU4pE9QV7pj61+daL3RD1T0EjtISAcBZ+GQ8APT/9NIzc87MMiwtlA3UK8CI5tsVo6tV/NnfJi9/mKfJRfag9cN9Oflz4qggvk1LpyrLgkKubHXXrflKAg7BUMsbCYunFia68g70Jog4DNsJuvI5+ocswxO+idWs9MChBqqDBMCMtI7qmQsgKoLJt8QPsx6U3CNxMk8AxqTL1jSafAFqlzzWkCAwEAATANBgkqhkiG9w0BAQsFAAOCAQEAIEIfjqOr2m+8s2RR8VW/nBgOgu9HtPRda4qNhGbgBkZ8NDy7TwHqlHo1ujKW5RO438pRyLJmOibWN4a/rkUsSjin6vgy4l8KpQy+7a4cQCQHyl34TmPjbtiw1jKgiOjzRQY54NVwIJNMIMc1ZyQo4u0U30/FxgUv6akXfS5O1ePD+5xKOOC/Af9AletjhQMPwVxXDwFqfQf/p+SM4Pyn4L633MESfDrH8v9FjJd0lV5ZlEI4hpPtnbi9U+CInqCy3VDNlZjsXswaDRujjg3LERfOMvCgj+Dck3FzWG7EiCwXWNEPvdMzv4w7M6KXuiPPQkST8DUWjgkjUCeLBzT3yw==" ], + "priority" : [ "100" ], + "algorithm" : [ "RSA-OAEP" ] + } + }, { + "id" : "728769a3-99a4-4cca-959d-28181dfee7e8", + "name" : "rsa-generated", + "providerId" : "rsa-generated", + "subComponents" : { }, + "config" : { + "privateKey" : [ 
"MIIEowIBAAKCAQEAxIszQCv8bX3sKXJVtuLJV6cH/uhkzxcTEIcDe7y2Y2SFM0x2nF6wRLk8QkvIrRmelilegUIJttqZxLXMpxwUJGizehHQMrOCzNoGBZdVanoK7nNa5+FOYtlvL4GxNfwzS36sp3PnKQiGv5Q7RGuPthjLFfqTmYx/7GTDJC4vLEW5S01Vy/Xc9FE4FsT0hnm91lRWjppc9893M5QUy/TPu8udIuNV87Ko5yiIxQqcPiAQXJaN4CyGaDcYhhzzHdxVptIk2FvtxhpmNxrbtmBCx/o9/rBDQNTis8Ex6ItWC2PvC17UPvyOcZ4Fv/qO0L6JZ0mrpH95CeDU1kEP+KKZrwIDAQABAoIBAGGl6SYiVG1PyTQEXqqY/UCjt3jBnEg5ZhrpgWUKKrGyAO2uOSXSc5AJWfN0NHUwC9b+IbplhW8IJ6qQSmfiLu2x6S2mSQLPphZB4gkIGYNntCOpQ0p+aZP6BGAddt5j+VYyTvR5RKlh15S6QEHrkMB/i/LVBl0c7XeUzlEc8wnyj8DGvlmpcQzIcbWfqEZ/FciDdKGNN0M4V/r1uQiOUVZ69SWDBBwu41YwF7PYUsX83q8zn0nBeMqz0ggSf33lW4w31fox9c7EjIF01gPArE5uT+d+AwjVKHpd08LWGR9W9NSXVOPUKkzOM+PyvKGvzjMnlrm/feqowKQbL2q/GP0CgYEA/EsrvUojkFIWxHc19KJdJvqlYgLeWq6P/J7UmHgpl+S3nG6b9HH4/aM/ICDa5hxd5bmP5p2V3EuZWnyb6/QB5eipC7Ss3oM7XeS/PwvTp6NTC1fypx2zHKse3iuLeCGneRxiw15mB02ArJ/qJw/VSQK2J7RiR4+b6HYpdzQnIysCgYEAx25dTQqskQqsx/orJzuUqfNv/C0W4vqfz1eL3akFrdK+YqghXKFsDmh61JpTrTKnRLAdQeyOrhKwbNsdxSEEaeeLayKLVlimoFXGd/LZb5LQiwFcrvTzhnB+FLmFgqTnuLkpfY1woHEwSW9TpJewjbT9S6g0L2uh223nVXuLMY0CgYEA3pMOlmMGtvbEoTSuRBDNb2rmZm4zbfrcijgxRAWWZCtiFL68FU5LJLBVK2nw09sot1cabZCOuhdzxhFymRneZs73+5y8eV17DV2VnvA3HIiI5dQD/YzFDECm7ceqtiOylLUHKGZqSn0ETMaTkzxzpIKg4qxPm+RE3jMIZ+J5uJsCgYBk2iUIrtsxxgo2Xwavomu9vkPlbQ/j3QYwHn+2qqEalDZ/QbMNWvyAFMn49cpXDgSUsdM54V0OHpllkzFs3ROUUumoViHMmqw47OefBQp8Z+xaP2gVef4lAIJiDKe9t5MPUWPwADTyjgrzN/8+fw9juiFVv0wUpwOFKgEQs5diiQKBgC6RpZESc5Nl4nHrDvIl5n/zYED6BaXoLl15NhcoBudt5SIRO/RpvBW69A7aE/UK6p7WXjq4mP1ssIWz4KgATCoXUgYvn0a7Ql79r/CMce6/FvcuweED6u6bD0kdXuYhe8fR9IPmLfnnb4Cx3JOJeRZbiBSP5HOZJ7nsKibxcgPm" ], + "keyUse" : [ "SIG" ], + "certificate" : [ "MIIClzCCAX8CBgGIQhOHjjANBgkqhkiG9w0BAQsFADAPMQ0wCwYDVQQDDAR0ZXN0MB4XDTIzMDUyMjA2MDczNloXDTMzMDUyMjA2MDkxNlowDzENMAsGA1UEAwwEdGVzdDCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAMSLM0Ar/G197ClyVbbiyVenB/7oZM8XExCHA3u8tmNkhTNMdpxesES5PEJLyK0ZnpYpXoFCCbbamcS1zKccFCRos3oR0DKzgszaBgWXVWp6Cu5zWufhTmLZby+BsTX8M0t+rKdz5ykIhr+UO0Rrj7YYyxX6k5mMf+xkwyQuLyxFuUtNVcv13PRROBbE9IZ5vdZUVo6aXPfPdzOUFMv0z7vLnSLjVfOyqOcoiMUKnD4gEFyWjeAshmg3GIYc8x3cVabSJNhb7cYaZjca27ZgQsf6Pf6wQ0DU4rPBMeiLVgtj7wte1D78jnGeBb/6jtC+iWdJq6R/eQng1NZBD/iima8CAwEAATANBgkqhkiG9w0BAQsFAAOCAQEAe0Bo1UpGfpOlJiVhp0XWExm8bdxFgXOU2M5XeZBsWAqBehvJkzn+tbAtlVNiIiN58XFFpH+xLZ2nJIZR5FHeCD3bYAgK72j5k45HJI95vPyslelfT/m3Np78+1iUa1U1WxN40JaowP1EeTkk5O8Pk4zTQ1Ne1usmKd+SJxI1KWN0kKuVFMmdNRb5kQKWeQvOSlWl7rd4bvHGvVnxgcPC1bshEJKRt+VpaUjpm6CKd8C3Kt7IWfIX4HTVhKZkmLn7qv6aSfwWelwZfLdaXcLXixqzqNuUk/VWbF9JT4iiag9F3mt7xryIkoRp1AEjCA82HqK72F4JCFyOhCiGrMfKJw==" ], + "priority" : [ "100" ] + } + }, { + "id" : "f30af2d2-d042-43b8-bc6d-22f6bab6934c", + "name" : "hmac-generated", + "providerId" : "hmac-generated", + "subComponents" : { }, + "config" : { + "kid" : [ "6f0d9688-e974-42b4-9d84-8d098c51007c" ], + "secret" : [ "8nruwD66Revr9k21e-BHtcyvNzAMFOsstxSAB0Gdy2qe2qGRm2kYOwsPzrH9ZQSdj2041SraKo6a3SHvCyTBAQ" ], + "priority" : [ "100" ], + "algorithm" : [ "HS256" ] + } + } ] + }, + "internationalizationEnabled" : false, + "supportedLocales" : [ ], + "authenticationFlows" : [ { + "id" : "94c65ba1-ba50-4be2-94c4-de656145eb67", + "alias" : "Account verification options", + "description" : "Method with which to verity the existing account", + "providerId" : "basic-flow", + "topLevel" : false, + "builtIn" : true, + "authenticationExecutions" : [ { + "authenticator" : "idp-email-verification", + "authenticatorFlow" : false, + "requirement" : "ALTERNATIVE", + "priority" : 10, + "autheticatorFlow" : false, + "userSetupAllowed" : false + }, { + "authenticatorFlow" : true, + 
"requirement" : "ALTERNATIVE", + "priority" : 20, + "autheticatorFlow" : true, + "flowAlias" : "Verify Existing Account by Re-authentication", + "userSetupAllowed" : false + } ] + }, { + "id" : "3b706ddf-c4b6-498a-803c-772878bc9bc3", + "alias" : "Authentication Options", + "description" : "Authentication options.", + "providerId" : "basic-flow", + "topLevel" : false, + "builtIn" : true, + "authenticationExecutions" : [ { + "authenticator" : "basic-auth", + "authenticatorFlow" : false, + "requirement" : "REQUIRED", + "priority" : 10, + "autheticatorFlow" : false, + "userSetupAllowed" : false + }, { + "authenticator" : "basic-auth-otp", + "authenticatorFlow" : false, + "requirement" : "DISABLED", + "priority" : 20, + "autheticatorFlow" : false, + "userSetupAllowed" : false + }, { + "authenticator" : "auth-spnego", + "authenticatorFlow" : false, + "requirement" : "DISABLED", + "priority" : 30, + "autheticatorFlow" : false, + "userSetupAllowed" : false + } ] + }, { + "id" : "9ea0b8f6-882c-45ad-9110-78adf5a5d233", + "alias" : "Browser - Conditional OTP", + "description" : "Flow to determine if the OTP is required for the authentication", + "providerId" : "basic-flow", + "topLevel" : false, + "builtIn" : true, + "authenticationExecutions" : [ { + "authenticator" : "conditional-user-configured", + "authenticatorFlow" : false, + "requirement" : "REQUIRED", + "priority" : 10, + "autheticatorFlow" : false, + "userSetupAllowed" : false + }, { + "authenticator" : "auth-otp-form", + "authenticatorFlow" : false, + "requirement" : "REQUIRED", + "priority" : 20, + "autheticatorFlow" : false, + "userSetupAllowed" : false + } ] + }, { + "id" : "99c5ba83-b585-4601-b740-1a26670bf4e9", + "alias" : "Direct Grant - Conditional OTP", + "description" : "Flow to determine if the OTP is required for the authentication", + "providerId" : "basic-flow", + "topLevel" : false, + "builtIn" : true, + "authenticationExecutions" : [ { + "authenticator" : "conditional-user-configured", + "authenticatorFlow" : false, + "requirement" : "REQUIRED", + "priority" : 10, + "autheticatorFlow" : false, + "userSetupAllowed" : false + }, { + "authenticator" : "direct-grant-validate-otp", + "authenticatorFlow" : false, + "requirement" : "REQUIRED", + "priority" : 20, + "autheticatorFlow" : false, + "userSetupAllowed" : false + } ] + }, { + "id" : "65b73dec-7dd1-4de8-b542-a023b7104afc", + "alias" : "First broker login - Conditional OTP", + "description" : "Flow to determine if the OTP is required for the authentication", + "providerId" : "basic-flow", + "topLevel" : false, + "builtIn" : true, + "authenticationExecutions" : [ { + "authenticator" : "conditional-user-configured", + "authenticatorFlow" : false, + "requirement" : "REQUIRED", + "priority" : 10, + "autheticatorFlow" : false, + "userSetupAllowed" : false + }, { + "authenticator" : "auth-otp-form", + "authenticatorFlow" : false, + "requirement" : "REQUIRED", + "priority" : 20, + "autheticatorFlow" : false, + "userSetupAllowed" : false + } ] + }, { + "id" : "9a26b76f-da95-43f1-8da3-16c4a0654f07", + "alias" : "Handle Existing Account", + "description" : "Handle what to do if there is existing account with same email/username like authenticated identity provider", + "providerId" : "basic-flow", + "topLevel" : false, + "builtIn" : true, + "authenticationExecutions" : [ { + "authenticator" : "idp-confirm-link", + "authenticatorFlow" : false, + "requirement" : "REQUIRED", + "priority" : 10, + "autheticatorFlow" : false, + "userSetupAllowed" : false + }, { + "authenticatorFlow" : true, + 
"requirement" : "REQUIRED", + "priority" : 20, + "autheticatorFlow" : true, + "flowAlias" : "Account verification options", + "userSetupAllowed" : false + } ] + }, { + "id" : "0a77285e-d7d5-4b6c-aa9a-3eadb5e7e3d3", + "alias" : "Reset - Conditional OTP", + "description" : "Flow to determine if the OTP should be reset or not. Set to REQUIRED to force.", + "providerId" : "basic-flow", + "topLevel" : false, + "builtIn" : true, + "authenticationExecutions" : [ { + "authenticator" : "conditional-user-configured", + "authenticatorFlow" : false, + "requirement" : "REQUIRED", + "priority" : 10, + "autheticatorFlow" : false, + "userSetupAllowed" : false + }, { + "authenticator" : "reset-otp", + "authenticatorFlow" : false, + "requirement" : "REQUIRED", + "priority" : 20, + "autheticatorFlow" : false, + "userSetupAllowed" : false + } ] + }, { + "id" : "cb6c0b3b-2f5f-4493-9d14-6130f8b58dd7", + "alias" : "User creation or linking", + "description" : "Flow for the existing/non-existing user alternatives", + "providerId" : "basic-flow", + "topLevel" : false, + "builtIn" : true, + "authenticationExecutions" : [ { + "authenticatorConfig" : "create unique user config", + "authenticator" : "idp-create-user-if-unique", + "authenticatorFlow" : false, + "requirement" : "ALTERNATIVE", + "priority" : 10, + "autheticatorFlow" : false, + "userSetupAllowed" : false + }, { + "authenticatorFlow" : true, + "requirement" : "ALTERNATIVE", + "priority" : 20, + "autheticatorFlow" : true, + "flowAlias" : "Handle Existing Account", + "userSetupAllowed" : false + } ] + }, { + "id" : "0fd3db1b-e93d-4768-82ca-a1498ddc11d0", + "alias" : "Verify Existing Account by Re-authentication", + "description" : "Reauthentication of existing account", + "providerId" : "basic-flow", + "topLevel" : false, + "builtIn" : true, + "authenticationExecutions" : [ { + "authenticator" : "idp-username-password-form", + "authenticatorFlow" : false, + "requirement" : "REQUIRED", + "priority" : 10, + "autheticatorFlow" : false, + "userSetupAllowed" : false + }, { + "authenticatorFlow" : true, + "requirement" : "CONDITIONAL", + "priority" : 20, + "autheticatorFlow" : true, + "flowAlias" : "First broker login - Conditional OTP", + "userSetupAllowed" : false + } ] + }, { + "id" : "86610e70-f9f5-4c11-8a9e-9de1770565fb", + "alias" : "browser", + "description" : "browser based authentication", + "providerId" : "basic-flow", + "topLevel" : true, + "builtIn" : true, + "authenticationExecutions" : [ { + "authenticator" : "auth-cookie", + "authenticatorFlow" : false, + "requirement" : "ALTERNATIVE", + "priority" : 10, + "autheticatorFlow" : false, + "userSetupAllowed" : false + }, { + "authenticator" : "auth-spnego", + "authenticatorFlow" : false, + "requirement" : "DISABLED", + "priority" : 20, + "autheticatorFlow" : false, + "userSetupAllowed" : false + }, { + "authenticator" : "identity-provider-redirector", + "authenticatorFlow" : false, + "requirement" : "ALTERNATIVE", + "priority" : 25, + "autheticatorFlow" : false, + "userSetupAllowed" : false + }, { + "authenticatorFlow" : true, + "requirement" : "ALTERNATIVE", + "priority" : 30, + "autheticatorFlow" : true, + "flowAlias" : "forms", + "userSetupAllowed" : false + } ] + }, { + "id" : "f6aa23dd-8532-4d92-9780-3ea226481e3b", + "alias" : "clients", + "description" : "Base authentication for clients", + "providerId" : "client-flow", + "topLevel" : true, + "builtIn" : true, + "authenticationExecutions" : [ { + "authenticator" : "client-secret", + "authenticatorFlow" : false, + "requirement" : "ALTERNATIVE", + 
"priority" : 10, + "autheticatorFlow" : false, + "userSetupAllowed" : false + }, { + "authenticator" : "client-jwt", + "authenticatorFlow" : false, + "requirement" : "ALTERNATIVE", + "priority" : 20, + "autheticatorFlow" : false, + "userSetupAllowed" : false + }, { + "authenticator" : "client-secret-jwt", + "authenticatorFlow" : false, + "requirement" : "ALTERNATIVE", + "priority" : 30, + "autheticatorFlow" : false, + "userSetupAllowed" : false + }, { + "authenticator" : "client-x509", + "authenticatorFlow" : false, + "requirement" : "ALTERNATIVE", + "priority" : 40, + "autheticatorFlow" : false, + "userSetupAllowed" : false + } ] + }, { + "id" : "4d2caf65-1703-4ddb-8890-70232e91bcd8", + "alias" : "direct grant", + "description" : "OpenID Connect Resource Owner Grant", + "providerId" : "basic-flow", + "topLevel" : true, + "builtIn" : true, + "authenticationExecutions" : [ { + "authenticator" : "direct-grant-validate-username", + "authenticatorFlow" : false, + "requirement" : "REQUIRED", + "priority" : 10, + "autheticatorFlow" : false, + "userSetupAllowed" : false + }, { + "authenticator" : "direct-grant-validate-password", + "authenticatorFlow" : false, + "requirement" : "REQUIRED", + "priority" : 20, + "autheticatorFlow" : false, + "userSetupAllowed" : false + }, { + "authenticatorFlow" : true, + "requirement" : "CONDITIONAL", + "priority" : 30, + "autheticatorFlow" : true, + "flowAlias" : "Direct Grant - Conditional OTP", + "userSetupAllowed" : false + } ] + }, { + "id" : "eaa20c41-5334-4fb4-8c45-fb9cc71f7f74", + "alias" : "docker auth", + "description" : "Used by Docker clients to authenticate against the IDP", + "providerId" : "basic-flow", + "topLevel" : true, + "builtIn" : true, + "authenticationExecutions" : [ { + "authenticator" : "docker-http-basic-authenticator", + "authenticatorFlow" : false, + "requirement" : "REQUIRED", + "priority" : 10, + "autheticatorFlow" : false, + "userSetupAllowed" : false + } ] + }, { + "id" : "b9febfb1-f0aa-4590-b782-272a4aa11575", + "alias" : "first broker login", + "description" : "Actions taken after first broker login with identity provider account, which is not yet linked to any Keycloak account", + "providerId" : "basic-flow", + "topLevel" : true, + "builtIn" : true, + "authenticationExecutions" : [ { + "authenticatorConfig" : "review profile config", + "authenticator" : "idp-review-profile", + "authenticatorFlow" : false, + "requirement" : "REQUIRED", + "priority" : 10, + "autheticatorFlow" : false, + "userSetupAllowed" : false + }, { + "authenticatorFlow" : true, + "requirement" : "REQUIRED", + "priority" : 20, + "autheticatorFlow" : true, + "flowAlias" : "User creation or linking", + "userSetupAllowed" : false + } ] + }, { + "id" : "03bb6ff4-eccb-4f2f-8953-3769f78c3bf3", + "alias" : "forms", + "description" : "Username, password, otp and other auth forms.", + "providerId" : "basic-flow", + "topLevel" : false, + "builtIn" : true, + "authenticationExecutions" : [ { + "authenticator" : "auth-username-password-form", + "authenticatorFlow" : false, + "requirement" : "REQUIRED", + "priority" : 10, + "autheticatorFlow" : false, + "userSetupAllowed" : false + }, { + "authenticatorFlow" : true, + "requirement" : "CONDITIONAL", + "priority" : 20, + "autheticatorFlow" : true, + "flowAlias" : "Browser - Conditional OTP", + "userSetupAllowed" : false + } ] + }, { + "id" : "38385189-246b-4ea0-ac05-d49dfe1709da", + "alias" : "http challenge", + "description" : "An authentication flow based on challenge-response HTTP Authentication Schemes", + "providerId" : 
"basic-flow", + "topLevel" : true, + "builtIn" : true, + "authenticationExecutions" : [ { + "authenticator" : "no-cookie-redirect", + "authenticatorFlow" : false, + "requirement" : "REQUIRED", + "priority" : 10, + "autheticatorFlow" : false, + "userSetupAllowed" : false + }, { + "authenticatorFlow" : true, + "requirement" : "REQUIRED", + "priority" : 20, + "autheticatorFlow" : true, + "flowAlias" : "Authentication Options", + "userSetupAllowed" : false + } ] + }, { + "id" : "1022f3c2-0469-41c9-861e-918908f103df", + "alias" : "registration", + "description" : "registration flow", + "providerId" : "basic-flow", + "topLevel" : true, + "builtIn" : true, + "authenticationExecutions" : [ { + "authenticator" : "registration-page-form", + "authenticatorFlow" : true, + "requirement" : "REQUIRED", + "priority" : 10, + "autheticatorFlow" : true, + "flowAlias" : "registration form", + "userSetupAllowed" : false + } ] + }, { + "id" : "00d36c3b-e1dc-41f8-bfd0-5f8c80ea07e8", + "alias" : "registration form", + "description" : "registration form", + "providerId" : "form-flow", + "topLevel" : false, + "builtIn" : true, + "authenticationExecutions" : [ { + "authenticator" : "registration-user-creation", + "authenticatorFlow" : false, + "requirement" : "REQUIRED", + "priority" : 20, + "autheticatorFlow" : false, + "userSetupAllowed" : false + }, { + "authenticator" : "registration-profile-action", + "authenticatorFlow" : false, + "requirement" : "REQUIRED", + "priority" : 40, + "autheticatorFlow" : false, + "userSetupAllowed" : false + }, { + "authenticator" : "registration-password-action", + "authenticatorFlow" : false, + "requirement" : "REQUIRED", + "priority" : 50, + "autheticatorFlow" : false, + "userSetupAllowed" : false + }, { + "authenticator" : "registration-recaptcha-action", + "authenticatorFlow" : false, + "requirement" : "DISABLED", + "priority" : 60, + "autheticatorFlow" : false, + "userSetupAllowed" : false + } ] + }, { + "id" : "4374c16e-8c65-4168-94c2-df1ab3f3e6ad", + "alias" : "reset credentials", + "description" : "Reset credentials for a user if they forgot their password or something", + "providerId" : "basic-flow", + "topLevel" : true, + "builtIn" : true, + "authenticationExecutions" : [ { + "authenticator" : "reset-credentials-choose-user", + "authenticatorFlow" : false, + "requirement" : "REQUIRED", + "priority" : 10, + "autheticatorFlow" : false, + "userSetupAllowed" : false + }, { + "authenticator" : "reset-credential-email", + "authenticatorFlow" : false, + "requirement" : "REQUIRED", + "priority" : 20, + "autheticatorFlow" : false, + "userSetupAllowed" : false + }, { + "authenticator" : "reset-password", + "authenticatorFlow" : false, + "requirement" : "REQUIRED", + "priority" : 30, + "autheticatorFlow" : false, + "userSetupAllowed" : false + }, { + "authenticatorFlow" : true, + "requirement" : "CONDITIONAL", + "priority" : 40, + "autheticatorFlow" : true, + "flowAlias" : "Reset - Conditional OTP", + "userSetupAllowed" : false + } ] + }, { + "id" : "04d6ed6a-76c9-41fb-9074-bff8a80c2286", + "alias" : "saml ecp", + "description" : "SAML ECP Profile Authentication Flow", + "providerId" : "basic-flow", + "topLevel" : true, + "builtIn" : true, + "authenticationExecutions" : [ { + "authenticator" : "http-basic-authenticator", + "authenticatorFlow" : false, + "requirement" : "REQUIRED", + "priority" : 10, + "autheticatorFlow" : false, + "userSetupAllowed" : false + } ] + } ], + "authenticatorConfig" : [ { + "id" : "e7bad67d-1236-430a-a327-9194f9d1e2b0", + "alias" : "create unique user 
config", + "config" : { + "require.password.update.after.registration" : "false" + } + }, { + "id" : "287b5989-a927-4cf5-8067-74594ce19bc1", + "alias" : "review profile config", + "config" : { + "update.profile.on.first.login" : "missing" + } + } ], + "requiredActions" : [ { + "alias" : "CONFIGURE_TOTP", + "name" : "Configure OTP", + "providerId" : "CONFIGURE_TOTP", + "enabled" : true, + "defaultAction" : false, + "priority" : 10, + "config" : { } + }, { + "alias" : "terms_and_conditions", + "name" : "Terms and Conditions", + "providerId" : "terms_and_conditions", + "enabled" : false, + "defaultAction" : false, + "priority" : 20, + "config" : { } + }, { + "alias" : "UPDATE_PASSWORD", + "name" : "Update Password", + "providerId" : "UPDATE_PASSWORD", + "enabled" : true, + "defaultAction" : false, + "priority" : 30, + "config" : { } + }, { + "alias" : "UPDATE_PROFILE", + "name" : "Update Profile", + "providerId" : "UPDATE_PROFILE", + "enabled" : true, + "defaultAction" : false, + "priority" : 40, + "config" : { } + }, { + "alias" : "VERIFY_EMAIL", + "name" : "Verify Email", + "providerId" : "VERIFY_EMAIL", + "enabled" : true, + "defaultAction" : false, + "priority" : 50, + "config" : { } + }, { + "alias" : "delete_account", + "name" : "Delete Account", + "providerId" : "delete_account", + "enabled" : false, + "defaultAction" : false, + "priority" : 60, + "config" : { } + }, { + "alias" : "webauthn-register", + "name" : "Webauthn Register", + "providerId" : "webauthn-register", + "enabled" : true, + "defaultAction" : false, + "priority" : 70, + "config" : { } + }, { + "alias" : "webauthn-register-passwordless", + "name" : "Webauthn Register Passwordless", + "providerId" : "webauthn-register-passwordless", + "enabled" : true, + "defaultAction" : false, + "priority" : 80, + "config" : { } + }, { + "alias" : "update_user_locale", + "name" : "Update User Locale", + "providerId" : "update_user_locale", + "enabled" : true, + "defaultAction" : false, + "priority" : 1000, + "config" : { } + } ], + "browserFlow" : "browser", + "registrationFlow" : "registration", + "directGrantFlow" : "direct grant", + "resetCredentialsFlow" : "reset credentials", + "clientAuthenticationFlow" : "clients", + "dockerAuthenticationFlow" : "docker auth", + "attributes" : { + "cibaBackchannelTokenDeliveryMode" : "poll", + "cibaAuthRequestedUserHint" : "login_hint", + "oauth2DevicePollingInterval" : "5", + "clientOfflineSessionMaxLifespan" : "0", + "clientSessionIdleTimeout" : "0", + "clientOfflineSessionIdleTimeout" : "0", + "cibaInterval" : "5", + "cibaExpiresIn" : "120", + "oauth2DeviceCodeLifespan" : "600", + "parRequestUriLifespan" : "60", + "clientSessionMaxLifespan" : "0", + "frontendUrl" : "" + }, + "keycloakVersion" : "19.0.3", + "userManagedAccessAllowed" : false, + "clientProfiles" : { + "profiles" : [ ] + }, + "clientPolicies" : { + "policies" : [ ] + } +} \ No newline at end of file diff --git a/docker-compose-dev.yml b/docker-compose-dev.yml index 30c55661a20..f00be57ea9c 100644 --- a/docker-compose-dev.yml +++ b/docker-compose-dev.yml @@ -100,8 +100,8 @@ services: - /mail:mode=770,size=128M,uid=1000,gid=1000 dev_keycloak: - container_name: "dev_keycloack" - image: 'quay.io/keycloak/keycloak:19.0' + container_name: "dev_keycloak" + image: 'quay.io/keycloak/keycloak:21.0' hostname: keycloak environment: - KEYCLOAK_ADMIN=kcadmin @@ -116,7 +116,7 @@ services: ports: - "8090:8090" volumes: - - './conf/keycloak/oidc-realm.json:/opt/keycloak/data/import/oidc-realm.json' + - 
'./conf/keycloak/test-realm.json:/opt/keycloak/data/import/test-realm.json'
     networks:
       dataverse:

From 947ed780d8a3300c9202e62853a0b810b1039b49 Mon Sep 17 00:00:00 2001
From: Oliver Bertuch
Date: Mon, 22 May 2023 16:10:40 +0200
Subject: [PATCH 0212/1525] docs(dev,oidc): update new OIDC test realm
 description

---
 .../source/developers/remote-users.rst        | 27 +++++++++++++++----
 1 file changed, 22 insertions(+), 5 deletions(-)

diff --git a/doc/sphinx-guides/source/developers/remote-users.rst b/doc/sphinx-guides/source/developers/remote-users.rst
index d8f90e9257f..a07f104d1c8 100755
--- a/doc/sphinx-guides/source/developers/remote-users.rst
+++ b/doc/sphinx-guides/source/developers/remote-users.rst
@@ -39,7 +39,7 @@ STOP! ``oidc-keycloak-auth-provider.json`` was changed from http://localhost:809
 
 If you are working on the OpenID Connect (OIDC) user authentication flow, you do not need to connect to a remote provider (as explained in :doc:`/installation/oidc`) to test this feature. Instead, you can use the available configuration that allows you to run a test Keycloak OIDC identity management service locally through a Docker container.
 
-(Please note! The client secret (``ss6gE8mODCDfqesQaSG3gwUwZqZt547E``) is hard-coded in ``oidc-realm.json`` and ``oidc-keycloak-auth-provider.json``. Do not use this config in production! This is only for developers.)
+(Please note! The client secret (``94XHrfNRwXsjqTqApRrwWmhDLDHpIYV8``) is hard-coded in ``test-realm.json`` and ``oidc-keycloak-auth-provider.json``. Do not use this config in production! This is only for developers.)
 
 You can find this configuration in ``conf/keycloak``. There are two options available in this directory to run a Keycloak container: bash script or docker-compose.
 
@@ -55,15 +55,32 @@ Now load the configuration defined in ``oidc-keycloak-auth-provider.json`` into
 
 You should see the new provider, called "OIDC-Keycloak", under "Other options" on the Log In page.
 
-You should be able to log into Keycloak with the following credentials:
-
-- username: kcuser
-- password: kcpassword
+You should be able to log into Keycloak with one of the following credentials:
+
+.. list-table::
+
+   * - Username
+     - Password
+     - Group
+   * - admin
+     - admin
+     - admins
+   * - curator
+     - curator
+     - curators
+   * - user
+     - user
+     - members
+   * - affiliate
+     - affiliate
+     - \-
 
 In case you want to stop and remove the Keycloak container, just run the other available bash script:
 
 ``./rm-keycloak.sh``
 
+Note: the Keycloak admin account for logging in to the admin console is ``kcadmin:kcpassword``
+
 ----
 
 Previous: :doc:`unf/index` | Next: :doc:`geospatial`

From 23a11718618fcc15daa3d6ddcfb18f0f49378e9d Mon Sep 17 00:00:00 2001
From: Oliver Bertuch
Date: Mon, 22 May 2023 16:13:47 +0200
Subject: [PATCH 0213/1525] test(oidc): update OIDC integration test with new
 realm

This commit enables using Keycloak 20+ with this test by replacing the
test realm and using the client to retrieve the access token on a side
channel in a Keycloak 20+ compatible way.
(The old one only worked for v19, this one is compatible with all) --- .../OIDCAuthenticationProviderFactoryIT.java | 71 ++++++++++++------- 1 file changed, 47 insertions(+), 24 deletions(-) diff --git a/src/test/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/oidc/OIDCAuthenticationProviderFactoryIT.java b/src/test/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/oidc/OIDCAuthenticationProviderFactoryIT.java index a5aa29cc083..88f70c53948 100644 --- a/src/test/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/oidc/OIDCAuthenticationProviderFactoryIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/oidc/OIDCAuthenticationProviderFactoryIT.java @@ -8,6 +8,8 @@ import edu.harvard.iq.dataverse.api.auth.doubles.BearerTokenKeyContainerRequestTestFake; import edu.harvard.iq.dataverse.authorization.AuthenticationServiceBean; import edu.harvard.iq.dataverse.authorization.UserRecordIdentifier; +import edu.harvard.iq.dataverse.authorization.providers.oauth2.OAuth2Exception; +import edu.harvard.iq.dataverse.authorization.providers.oauth2.OAuth2UserRecord; import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser; import edu.harvard.iq.dataverse.authorization.users.User; import edu.harvard.iq.dataverse.mocks.MockAuthenticatedUser; @@ -17,6 +19,7 @@ import org.junit.jupiter.api.Tag; import org.junit.jupiter.api.Test; import org.junit.jupiter.api.extension.ExtendWith; +import org.keycloak.OAuth2Constants; import org.keycloak.admin.client.Keycloak; import org.keycloak.admin.client.KeycloakBuilder; import org.mockito.InjectMocks; @@ -32,6 +35,7 @@ import static edu.harvard.iq.dataverse.authorization.providers.oauth2.oidc.OIDCAuthenticationProviderFactoryIT.clientSecret; import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertThrows; import static org.junit.jupiter.api.Assertions.assertTrue; import static org.junit.jupiter.api.Assumptions.assumeFalse; import static org.junit.jupiter.api.Assumptions.assumeTrue; @@ -47,18 +51,19 @@ @JvmSetting(key = JvmSettings.OIDC_AUTH_SERVER_URL, method = "getAuthUrl") class OIDCAuthenticationProviderFactoryIT { - // NOTE: the following values are taken from the realm import file! 
- static final String clientId = "oidc-client"; - static final String clientSecret = "ss6gE8mODCDfqesQaSG3gwUwZqZt547E"; - static final String realm = "oidc-realm"; - static final String adminUser = "kcuser"; + static final String clientId = "test"; + static final String clientSecret = "94XHrfNRwXsjqTqApRrwWmhDLDHpIYV8"; + static final String realm = "test"; + static final String realmAdminUser = "admin"; + static final String realmAdminPassword = "admin"; + + static final String adminUser = "kcadmin"; static final String adminPassword = "kcpassword"; - static final String clientIdAdminCli = "admin-cli"; - // The realm JSON resides in conf/keycloak/oidc-realm.json and gets avail here using in pom.xml + // The realm JSON resides in conf/keycloak/test-realm.json and gets avail here using in pom.xml @Container - static KeycloakContainer keycloakContainer = new KeycloakContainer("quay.io/keycloak/keycloak:19.0") - .withRealmImportFile("keycloak/oidc-realm.json") + static KeycloakContainer keycloakContainer = new KeycloakContainer("quay.io/keycloak/keycloak:21.0") + .withRealmImportFile("keycloak/test-realm.json") .withAdminUsername(adminUser) .withAdminPassword(adminPassword); @@ -76,31 +81,44 @@ OIDCAuthProvider getProvider() throws Exception { return oidcAuthProvider; } - Keycloak getAdminClient() { - return KeycloakBuilder.builder() + // NOTE: This requires the "direct access grants" for the client to be enabled! + String getBearerTokenViaKeycloakAdminClient() throws Exception { + try (Keycloak keycloak = KeycloakBuilder.builder() .serverUrl(keycloakContainer.getAuthServerUrl()) + .grantType(OAuth2Constants.PASSWORD) .realm(realm) - .clientId(clientIdAdminCli) - .username(keycloakContainer.getAdminUsername()) - .password(keycloakContainer.getAdminPassword()) - .build(); - } - - String getBearerToken() throws Exception { - Keycloak keycloak = getAdminClient(); - return keycloak.tokenManager().getAccessTokenString(); + .clientId(clientId) + .clientSecret(clientSecret) + .username(realmAdminUser) + .password(realmAdminPassword) + .scope("openid") + .build()) { + return keycloak.tokenManager().getAccessTokenString(); + } } + /** + * This basic test covers configuring an OIDC provider via MPCONFIG and being able to use it. + */ @Test void testCreateProvider() throws Exception { + // given OIDCAuthProvider oidcAuthProvider = getProvider(); - String token = getBearerToken(); + String token = getBearerTokenViaKeycloakAdminClient(); assumeFalse(token == null); - Optional info = oidcAuthProvider.getUserInfo(new BearerAccessToken(token)); + Optional info = Optional.empty(); + + // when + try { + info = oidcAuthProvider.getUserInfo(new BearerAccessToken(token)); + } catch (OAuth2Exception e) { + System.out.println(e.getMessageBody()); + } + //then assertTrue(info.isPresent()); - assertEquals(adminUser, info.get().getPreferredUsername()); + assertEquals(realmAdminUser, info.get().getPreferredUsername()); } @Mock @@ -111,6 +129,11 @@ void testCreateProvider() throws Exception { @InjectMocks BearerTokenAuthMechanism bearerTokenAuthMechanism; + /** + * This test covers using an OIDC provider as authorization party when accessing the Dataverse API with a + * Bearer Token. See {@link BearerTokenAuthMechanism}. It needs to mock the auth services to avoid adding + * more dependencies. 
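+     * (For orientation only: a client presents such a token on any API call as an
+     * {@code Authorization: Bearer ...} header, e.g. something like
+     * {@code curl -H "Authorization: Bearer $TOKEN" "$SERVER_URL/api/users/:me"};
+     * the endpoint and variable names here are illustrative and not exercised by this test.)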
+     */
+    @Test
+    @JvmSetting(key = JvmSettings.FEATURE_FLAG, varArgs = "api-bearer-auth", value = "true")
     void testApiBearerAuth() throws Exception {
@@ -120,7 +143,7 @@ void testApiBearerAuth() throws Exception {
 
         // given
         // Get the access token from the remote Keycloak in the container
-        String accessToken = getBearerToken();
+        String accessToken = getBearerTokenViaKeycloakAdminClient();
         assumeFalse(accessToken == null);
 
         OIDCAuthProvider oidcAuthProvider = getProvider();

From 844339dd6adc74977a6011f58cc01dd924b002cc Mon Sep 17 00:00:00 2001
From: Oliver Bertuch
Date: Wed, 22 May 2023 16:14:52 +0200
Subject: [PATCH 0214/1525] test(oidc): add third OIDC integration test for
 JSF login

Using HtmlUnit as a browser replacement, enable testing if the
Authorization Code Flow used when logging in via the UI works properly.
---
 pom.xml                                       |  6 ++
 .../OIDCAuthenticationProviderFactoryIT.java  | 78 +++++++++++++++++++
 2 files changed, 84 insertions(+)

diff --git a/pom.xml b/pom.xml
index e9a9b9dd611..2ba01e5ab61 100644
--- a/pom.xml
+++ b/pom.xml
@@ -610,6 +610,12 @@
             <version>${smallrye-mpconfig.version}</version>
             <scope>test</scope>
         </dependency>
+        <dependency>
+            <groupId>org.htmlunit</groupId>
+            <artifactId>htmlunit</artifactId>
+            <version>3.2.0</version>
+            <scope>test</scope>
+        </dependency>
     </dependencies>

diff --git a/src/test/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/oidc/OIDCAuthenticationProviderFactoryIT.java b/src/test/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/oidc/OIDCAuthenticationProviderFactoryIT.java
index 88f70c53948..2c963e8df46 100644
--- a/src/test/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/oidc/OIDCAuthenticationProviderFactoryIT.java
+++ b/src/test/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/oidc/OIDCAuthenticationProviderFactoryIT.java
@@ -16,6 +16,13 @@
 import edu.harvard.iq.dataverse.settings.JvmSettings;
 import edu.harvard.iq.dataverse.util.testing.JvmSetting;
 import edu.harvard.iq.dataverse.util.testing.LocalJvmSettings;
+import org.htmlunit.FailingHttpStatusCodeException;
+import org.htmlunit.WebClient;
+import org.htmlunit.WebResponse;
+import org.htmlunit.html.HtmlForm;
+import org.htmlunit.html.HtmlInput;
+import org.htmlunit.html.HtmlPage;
+import org.htmlunit.html.HtmlSubmitInput;
 import org.junit.jupiter.api.Tag;
 import org.junit.jupiter.api.Test;
 import org.junit.jupiter.api.extension.ExtendWith;
@@ -28,8 +35,11 @@
 import org.testcontainers.junit.jupiter.Container;
 import org.testcontainers.junit.jupiter.Testcontainers;
 
+import java.util.Map;
 import java.util.Optional;
 import java.util.Set;
+import java.util.regex.Pattern;
+import java.util.stream.Collectors;
 
 import static edu.harvard.iq.dataverse.authorization.providers.oauth2.oidc.OIDCAuthenticationProviderFactoryIT.clientId;
 import static edu.harvard.iq.dataverse.authorization.providers.oauth2.oidc.OIDCAuthenticationProviderFactoryIT.clientSecret;
@@ -166,4 +176,72 @@ void testApiBearerAuth() throws Exception {
         assertNotNull(lookedUpUser);
         assertEquals(user, lookedUpUser);
     }
+
+    /**
+     * This test covers the {@link OIDCAuthProvider#buildAuthzUrl(String, String)} and
+     * {@link OIDCAuthProvider#getUserRecord(String, String, String)} methods that are used when
+     * a user authenticates via the JSF UI. It covers enabling PKCE, which is not a hard requirement
+     * of the protocol, but might be required by some providers (as seen with Microsoft Azure AD).
+     * As we don't have a real browser, we use {@link WebClient} from HtmlUnit as a replacement.
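+     * (PKCE in brief: the client sends a hashed one-time secret, the code challenge, with the
+     * authorization request and reveals the matching plain code verifier only at the token
+     * exchange, so an intercepted authorization code cannot be redeemed by another party.)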
+ */ + @Test + @JvmSetting(key = JvmSettings.OIDC_PKCE_ENABLED, value = "true") + void testAuthorizationCodeFlowWithPKCE() throws Exception { + // given + String state = "foobar"; + String callbackUrl = "http://localhost:8080/oauth2callback.xhtml"; + + OIDCAuthProvider oidcAuthProvider = getProvider(); + String authzUrl = oidcAuthProvider.buildAuthzUrl(state, callbackUrl); + //System.out.println(authzUrl); + + try (WebClient webClient = new WebClient()) { + webClient.getOptions().setCssEnabled(false); + webClient.getOptions().setJavaScriptEnabled(false); + // We *want* to know about the redirect, as it contains the data we need! + webClient.getOptions().setRedirectEnabled(false); + + HtmlPage loginPage = webClient.getPage(authzUrl); + assumeTrue(loginPage.getTitleText().contains("Sign in to " + realm)); + + HtmlForm form = loginPage.getForms().get(0); + HtmlInput username = form.getInputByName("username"); + HtmlInput password = form.getInputByName("password"); + HtmlSubmitInput submit = form.getInputByName("login"); + + username.type(realmAdminUser); + password.type(realmAdminPassword); + + FailingHttpStatusCodeException exception = assertThrows(FailingHttpStatusCodeException.class, submit::click); + assertEquals(302, exception.getStatusCode()); + + WebResponse response = exception.getResponse(); + assertNotNull(response); + + String callbackLocation = response.getResponseHeaderValue("Location"); + assertTrue(callbackLocation.startsWith(callbackUrl)); + //System.out.println(callbackLocation); + + String queryPart = callbackLocation.trim().split("\\?")[1]; + Map parameters = Pattern.compile("\\s*&\\s*") + .splitAsStream(queryPart) + .map(s -> s.split("=", 2)) + .collect(Collectors.toMap(a -> a[0], a -> a.length > 1 ? a[1]: "")); + //System.out.println(map); + assertTrue(parameters.containsKey("code")); + assertTrue(parameters.containsKey("state")); + + OAuth2UserRecord userRecord = oidcAuthProvider.getUserRecord( + parameters.get("code"), + parameters.get("state"), + callbackUrl + ); + + assertNotNull(userRecord); + assertEquals(realmAdminUser, userRecord.getUsername()); + } catch (OAuth2Exception e) { + System.out.println(e.getMessageBody()); + throw e; + } + } } \ No newline at end of file From 6e1ab82a1fa8e18e1765238556af241b6d0d39f3 Mon Sep 17 00:00:00 2001 From: GPortas Date: Mon, 22 May 2023 15:57:55 +0100 Subject: [PATCH 0215/1525] Added: handling when private url user not found in getPrivateUrlDatasetVersion endpoint --- src/main/java/edu/harvard/iq/dataverse/api/Datasets.java | 3 +++ src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java | 4 ++++ 2 files changed, 7 insertions(+) diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java index 7036fb5fccc..a7546c2a3fd 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java @@ -3872,6 +3872,9 @@ public Response getDatasetSummaryFieldNames() { public Response getPrivateUrlDatasetVersion(@PathParam("privateUrlToken") String privateUrlToken, @QueryParam("anonymizedFieldValue") String anonymizedFieldValue) { PrivateUrlUser privateUrlUser = privateUrlService.getPrivateUrlUserFromToken(privateUrlToken); + if (privateUrlUser == null) { + return notFound("Private URL user not found"); + } boolean isAnonymizedAccess = privateUrlUser.hasAnonymizedAccess(); String anonymizedFieldTypeNames = settingsSvc.getValueForKey(SettingsServiceBean.Key.AnonymizedFieldTypeNames); if(isAnonymizedAccess && 
anonymizedFieldTypeNames == null) { diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java index 9d86723bcd1..89c80034321 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java @@ -3125,5 +3125,9 @@ public void getPrivateUrlDatasetVersion() { .statusCode(OK.getStatusCode()); UtilIT.deleteSetting(SettingsServiceBean.Key.AnonymizedFieldTypeNames); + + // Test invalid token + getPrivateUrlDatasetVersionAnonymizedResponse = UtilIT.getPrivateUrlDatasetVersion("invalidToken", null); + getPrivateUrlDatasetVersionAnonymizedResponse.then().assertThat().statusCode(NOT_FOUND.getStatusCode()); } } From 9e7756167e07170b609fcf22165dca4730af2e2c Mon Sep 17 00:00:00 2001 From: Leonid Andreev Date: Mon, 22 May 2023 17:14:48 -0400 Subject: [PATCH 0216/1525] File pids in individual collections (#8889) --- .../edu/harvard/iq/dataverse/Dataverse.java | 11 +++- .../harvard/iq/dataverse/api/Dataverses.java | 54 +++++++++++++++++++ .../datasetutility/AddReplaceFileHelper.java | 2 +- .../FinalizeDatasetPublicationCommand.java | 2 +- .../command/impl/PublishDatasetCommand.java | 2 +- .../UpdateDvObjectPIDMetadataCommand.java | 2 +- .../iq/dataverse/util/SystemConfig.java | 32 +++++++++-- .../workflow/WorkflowServiceBean.java | 2 +- ...5.14.0.1__8889-filepids-in-collections.sql | 1 + 9 files changed, 99 insertions(+), 9 deletions(-) create mode 100644 src/main/resources/db/migration/V5.14.0.1__8889-filepids-in-collections.sql diff --git a/src/main/java/edu/harvard/iq/dataverse/Dataverse.java b/src/main/java/edu/harvard/iq/dataverse/Dataverse.java index bc8716b6129..6498f942cdb 100644 --- a/src/main/java/edu/harvard/iq/dataverse/Dataverse.java +++ b/src/main/java/edu/harvard/iq/dataverse/Dataverse.java @@ -590,8 +590,17 @@ public void setCitationDatasetFieldTypes(List citationDatasetF this.citationDatasetFieldTypes = citationDatasetFieldTypes; } - + @Column(nullable = true) + private Boolean filePIDsEnabled; + public Boolean getFilePIDsEnabled() { + return filePIDsEnabled; + } + + public void setFilePIDsEnabled(boolean filePIDsEnabled) { + this.filePIDsEnabled = filePIDsEnabled; + } + public List getDataverseFacets() { return getDataverseFacets(false); } diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java b/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java index 8ebeefec405..51b7c403207 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java @@ -75,6 +75,7 @@ import edu.harvard.iq.dataverse.util.json.JSONLDUtil; import edu.harvard.iq.dataverse.util.json.JsonParseException; +import edu.harvard.iq.dataverse.util.json.JsonPrinter; import static edu.harvard.iq.dataverse.util.json.JsonPrinter.brief; import java.io.StringReader; import java.util.Collections; @@ -122,6 +123,7 @@ import java.util.Optional; import java.util.stream.Collectors; import javax.servlet.http.HttpServletResponse; +import javax.validation.constraints.NotNull; import javax.ws.rs.WebApplicationException; import javax.ws.rs.core.Context; import javax.ws.rs.core.StreamingOutput; @@ -575,6 +577,58 @@ public Response deleteDataverse(@Context ContainerRequestContext crc, @PathParam }, getRequestUser(crc)); } + /** + * Endpoint to change attributes of a Dataverse collection. 
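+     * The new value is passed as the request body; boolean attributes such as
+     * {@code filePIDsEnabled} expect "true" or "false", so a hypothetical call could look like
+     * {@code curl -X PUT -d "true" http://localhost:8080/api/dataverses/$ALIAS/attribute/filePIDsEnabled}.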
+     *
+     * @apiNote Example curl command:
+     *          curl -X PUT -d "test" http://localhost:8080/api/dataverses/$ALIAS/attribute/alias
+     *          to change the alias of the collection named $ALIAS to "test".
+     */
+    @PUT
+    @AuthRequired
+    @Path("{identifier}/attribute/{attribute}")
+    public Response updateAttribute(@Context ContainerRequestContext crc, @PathParam("identifier") String identifier,
+                                    @PathParam("attribute") String attribute, @NotNull String value) {
+        try {
+            Dataverse collection = findDataverseOrDie(identifier);
+            User user = getRequestUser(crc);
+            DataverseRequest dvRequest = createDataverseRequest(user);
+
+            // TODO: The cases below use hard coded strings, because we have no place for definitions of those!
+            //       They are taken from util.json.JsonParser / util.json.JsonPrinter. This shall be changed.
+            //       This also should be extended to more attributes, like the type, theme, contacts, some booleans, etc.
+            switch (attribute) {
+                case "alias":
+                    collection.setAlias(value);
+                    break;
+                case "name":
+                    collection.setName(value);
+                    break;
+                case "description":
+                    collection.setDescription(value);
+                    break;
+                case "affiliation":
+                    collection.setAffiliation(value);
+                    break;
+                case "filePIDsEnabled":
+                    collection.setFilePIDsEnabled(parseBooleanOrDie(value));
+                    break;
+                default:
+                    return badRequest("'" + attribute + "' is not a supported attribute");
+            }
+
+            // Off to persistence layer
+            execCommand(new UpdateDataverseCommand(collection, null, null, dvRequest, null));
+
+            // Also return modified collection to user
+            return ok("Update successful", JsonPrinter.json(collection));
+
+            // TODO: This is an anti-pattern, necessary due to this bean being an EJB, causing very noisy and unnecessary
+            //       logging by the EJB container for bubbling exceptions. (It would be handled by the error handlers.)
+ } catch (WrappedResponse e) { + return e.getResponse(); + } + } + @DELETE @AuthRequired @Path("{linkingDataverseId}/deleteLink/{linkedDataverseId}") diff --git a/src/main/java/edu/harvard/iq/dataverse/datasetutility/AddReplaceFileHelper.java b/src/main/java/edu/harvard/iq/dataverse/datasetutility/AddReplaceFileHelper.java index bf831d39965..f22f5dd3442 100644 --- a/src/main/java/edu/harvard/iq/dataverse/datasetutility/AddReplaceFileHelper.java +++ b/src/main/java/edu/harvard/iq/dataverse/datasetutility/AddReplaceFileHelper.java @@ -643,7 +643,7 @@ private boolean runAddReplacePhase1(Dataset owner, df.setRootDataFileId(fileToReplace.getRootDataFileId()); } // Reuse any file PID during a replace operation (if File PIDs are in use) - if (systemConfig.isFilePIDsEnabled()) { + if (systemConfig.isFilePIDsEnabledForCollection(owner.getOwner())) { df.setGlobalId(fileToReplace.getGlobalId()); df.setGlobalIdCreateTime(fileToReplace.getGlobalIdCreateTime()); // Should be true or fileToReplace wouldn't have an identifier (since it's not diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/FinalizeDatasetPublicationCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/FinalizeDatasetPublicationCommand.java index cb46b36eb53..80438c1bfc0 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/FinalizeDatasetPublicationCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/FinalizeDatasetPublicationCommand.java @@ -373,7 +373,7 @@ private void publicizeExternalIdentifier(Dataset dataset, CommandContext ctxt) t String currentGlobalIdProtocol = ctxt.settings().getValueForKey(SettingsServiceBean.Key.Protocol, ""); String currentGlobalAuthority = ctxt.settings().getValueForKey(SettingsServiceBean.Key.Authority, ""); String dataFilePIDFormat = ctxt.settings().getValueForKey(SettingsServiceBean.Key.DataFilePIDFormat, "DEPENDENT"); - boolean isFilePIDsEnabled = ctxt.systemConfig().isFilePIDsEnabled(); + boolean isFilePIDsEnabled = ctxt.systemConfig().isFilePIDsEnabledForCollection(getDataset().getOwner()); // We will skip trying to register the global identifiers for datafiles // if "dependent" file-level identifiers are requested, AND the naming // protocol, or the authority of the dataset global id is different from diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/PublishDatasetCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/PublishDatasetCommand.java index 5e29a21b6a1..f5ef121dee2 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/PublishDatasetCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/PublishDatasetCommand.java @@ -135,7 +135,7 @@ public PublishDatasetResult execute(CommandContext ctxt) throws CommandException String dataFilePIDFormat = ctxt.settings().getValueForKey(SettingsServiceBean.Key.DataFilePIDFormat, "DEPENDENT"); boolean registerGlobalIdsForFiles = (currentGlobalIdProtocol.equals(theDataset.getProtocol()) || dataFilePIDFormat.equals("INDEPENDENT")) - && ctxt.systemConfig().isFilePIDsEnabled(); + && ctxt.systemConfig().isFilePIDsEnabledForCollection(theDataset.getOwner()); if ( registerGlobalIdsForFiles ){ registerGlobalIdsForFiles = currentGlobalAuthority.equals( theDataset.getAuthority() ); diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateDvObjectPIDMetadataCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateDvObjectPIDMetadataCommand.java index 
7e37241563c..7230f9f9c0a 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateDvObjectPIDMetadataCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateDvObjectPIDMetadataCommand.java @@ -57,7 +57,7 @@ protected void executeImpl(CommandContext ctxt) throws CommandException { // didn't need updating. String currentGlobalIdProtocol = ctxt.settings().getValueForKey(SettingsServiceBean.Key.Protocol, ""); String dataFilePIDFormat = ctxt.settings().getValueForKey(SettingsServiceBean.Key.DataFilePIDFormat, "DEPENDENT"); - boolean isFilePIDsEnabled = ctxt.systemConfig().isFilePIDsEnabled(); + boolean isFilePIDsEnabled = ctxt.systemConfig().isFilePIDsEnabledForCollection(target.getOwner()); // We will skip trying to update the global identifiers for datafiles if they // aren't being used. // If they are, we need to assure that there's an existing PID or, as when diff --git a/src/main/java/edu/harvard/iq/dataverse/util/SystemConfig.java b/src/main/java/edu/harvard/iq/dataverse/util/SystemConfig.java index bf6dddd621a..96122c9caa6 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/SystemConfig.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/SystemConfig.java @@ -2,6 +2,7 @@ import com.ocpsoft.pretty.PrettyContext; import edu.harvard.iq.dataverse.DataFile; +import edu.harvard.iq.dataverse.Dataverse; import edu.harvard.iq.dataverse.DataverseServiceBean; import edu.harvard.iq.dataverse.DvObjectContainer; import edu.harvard.iq.dataverse.authorization.AuthenticationServiceBean; @@ -995,9 +996,34 @@ public boolean isAllowCustomTerms() { return settingsService.isTrueForKey(SettingsServiceBean.Key.AllowCustomTermsOfUse, safeDefaultIfKeyNotFound); } - public boolean isFilePIDsEnabled() { - boolean safeDefaultIfKeyNotFound = true; - return settingsService.isTrueForKey(SettingsServiceBean.Key.FilePIDsEnabled, safeDefaultIfKeyNotFound); + public boolean isFilePIDsEnabledForCollection(Dataverse collection) { + if (collection == null) { + return false; + } + + // Check the instance-wide setting first. If enabled for the entire + // instance, it's enabled for any single collection as well: (TODO: ?) + if (settingsService.isTrueForKey(SettingsServiceBean.Key.FilePIDsEnabled, true)) { + return true; + } + + // ... but if disabled instance-wide, it may still be enabled on a + // specific collection (?) 
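+        // (filePIDsEnabled is a nullable Boolean on purpose: null means "no
+        // explicit choice here, defer to the parent", while TRUE/FALSE decide
+        // for this collection and any descendants that don't override it.)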
        Dataverse thisCollection = collection;
+
+        // If neither enabled nor disabled specifically for this collection,
+        // the parent collection setting is inherited (recursively):
+        while (thisCollection.getFilePIDsEnabled() == null) {
+            if (thisCollection.getOwner() == null) {
+                // We've reached the root collection, and file PIDs registration
+                // hasn't been explicitly enabled, therefore we presume it is
+                // disabled for our collection:
+                return false;
+            }
+            thisCollection = thisCollection.getOwner();
+        }
+
+        return thisCollection.getFilePIDsEnabled();
     }
 
     public boolean isIndependentHandleService() {
diff --git a/src/main/java/edu/harvard/iq/dataverse/workflow/WorkflowServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/workflow/WorkflowServiceBean.java
index d57b7072be7..cf78c4f8cdf 100644
--- a/src/main/java/edu/harvard/iq/dataverse/workflow/WorkflowServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/workflow/WorkflowServiceBean.java
@@ -394,7 +394,7 @@ private void workflowCompleted(Workflow wf, WorkflowContext ctxt) {
                 String dataFilePIDFormat = settings.getValueForKey(SettingsServiceBean.Key.DataFilePIDFormat, "DEPENDENT");
                 boolean registerGlobalIdsForFiles =
                         (currentGlobalIdProtocol.equals(ctxt.getDataset().getProtocol()) || dataFilePIDFormat.equals("INDEPENDENT"))
-                        && systemConfig.isFilePIDsEnabled();
+                        && systemConfig.isFilePIDsEnabledForCollection(ctxt.getDataset().getOwner());
 
                 if ( registerGlobalIdsForFiles ){
                     registerGlobalIdsForFiles = currentGlobalAuthority.equals( ctxt.getDataset().getAuthority() );
                 }
diff --git a/src/main/resources/db/migration/V5.14.0.1__8889-filepids-in-collections.sql b/src/main/resources/db/migration/V5.14.0.1__8889-filepids-in-collections.sql
new file mode 100644
index 00000000000..5e6ce945fe2
--- /dev/null
+++ b/src/main/resources/db/migration/V5.14.0.1__8889-filepids-in-collections.sql
@@ -0,0 +1 @@
+ALTER TABLE dataverse ADD COLUMN IF NOT EXISTS filePIDsEnabled bool;

From bc84ed5d9d74a1563840fa2536ea518c29aa8952 Mon Sep 17 00:00:00 2001
From: Leonid Andreev
Date: Tue, 23 May 2023 10:31:44 -0400
Subject: [PATCH 0217/1525] change to the configuration logic. #8889

---
 .../harvard/iq/dataverse/util/SystemConfig.java | 17 ++++++-----------
 1 file changed, 6 insertions(+), 11 deletions(-)

diff --git a/src/main/java/edu/harvard/iq/dataverse/util/SystemConfig.java b/src/main/java/edu/harvard/iq/dataverse/util/SystemConfig.java
index 96122c9caa6..45f7f396783 100644
--- a/src/main/java/edu/harvard/iq/dataverse/util/SystemConfig.java
+++ b/src/main/java/edu/harvard/iq/dataverse/util/SystemConfig.java
@@ -1001,14 +1001,6 @@ public boolean isFilePIDsEnabledForCollection(Dataverse collection) {
             return false;
         }
 
-        // Check the instance-wide setting first. If enabled for the entire
-        // instance, it's enabled for any single collection as well: (TODO: ?)
-        if (settingsService.isTrueForKey(SettingsServiceBean.Key.FilePIDsEnabled, true)) {
-            return true;
-        }
-
-        // ... but if disabled instance-wide, it may still be enabled on a
-        // specific collection (?)
 Dataverse thisCollection = collection;
 
         // If neither enabled nor disabled specifically for this collection,
@@ -1016,13 +1008,16 @@ public boolean isFilePIDsEnabledForCollection(Dataverse collection) {
         while (thisCollection.getFilePIDsEnabled() == null) {
             if (thisCollection.getOwner() == null) {
                 // We've reached the root collection, and file PIDs registration
-                // hasn't been explicitly enabled, therefore we presume it is
-                // disabled for our collection:
-                return false;
+                // hasn't been explicitly enabled, therefore we presume that it is
+                // subject to how the registration is configured for the
+                // entire instance:
+                return settingsService.isTrueForKey(SettingsServiceBean.Key.FilePIDsEnabled, true);
             }
             thisCollection = thisCollection.getOwner();
         }
 
+        // If present, the setting of the first direct ancestor collection
+        // takes precedence:
         return thisCollection.getFilePIDsEnabled();
     }

From 4d9e9b1c405a99864159c02cdd0fb128bd1f0a2f Mon Sep 17 00:00:00 2001
From: Leonid Andreev
Date: Tue, 23 May 2023 10:55:48 -0400
Subject: [PATCH 0218/1525] Guide entry change for the instance-wide setting.
 #8889

---
 doc/sphinx-guides/source/installation/config.rst | 4 +++-
 1 file changed, 3 insertions(+), 1 deletion(-)

diff --git a/doc/sphinx-guides/source/installation/config.rst b/doc/sphinx-guides/source/installation/config.rst
index c23f71ce4f3..42daa83d654 100644
--- a/doc/sphinx-guides/source/installation/config.rst
+++ b/doc/sphinx-guides/source/installation/config.rst
@@ -2704,7 +2704,7 @@ timestamps.
 :FilePIDsEnabled
 ++++++++++++++++
 
-Toggles publishing of file-based PIDs for the entire installation. By default this setting is absent and Dataverse Software assumes it to be true. If enabled, the registration will be performed asynchronously (in the background) during publishing of a dataset.
+Toggles publishing of file-level PIDs for the entire installation. By default this setting is absent and Dataverse Software assumes it to be true. If enabled, the registration will be performed asynchronously (in the background) during publishing of a dataset.
 
 If you don't want to register file-based PIDs for your installation, set:
 
@@ -2712,6 +2712,8 @@ If you don't want to register file-based PIDs for your installation, set:
 
 Note: File-level PID registration was added in Dataverse Software 4.9; it could not be disabled until Dataverse Software 4.9.3.
 
+It is possible to override the installation-wide setting for specific collections. For example, it registration of PIDs for files can be enabled in a specific collection when it is disabled instance-wide. Or it can be disabled in specific collections where it is enabled by default. See ... for details.
+
 ..
_:IndependentHandleService:
 
 :IndependentHandleService

From e757a77f34ef6aa0f17cb2d0249dc016e9a84774 Mon Sep 17 00:00:00 2001
From: Leonid Andreev
Date: Tue, 23 May 2023 14:00:40 -0400
Subject: [PATCH 0219/1525] more guide entries #8889

---
 doc/sphinx-guides/source/api/native-api.rst  | 18 ++++++++++++++++++
 .../source/installation/config.rst           |  2 +-
 .../harvard/iq/dataverse/api/Dataverses.java | 14 ++++++++++++--
 3 files changed, 31 insertions(+), 3 deletions(-)

diff --git a/doc/sphinx-guides/source/api/native-api.rst b/doc/sphinx-guides/source/api/native-api.rst
index 6f10b28f55b..0f6d7c1aa5a 100644
--- a/doc/sphinx-guides/source/api/native-api.rst
+++ b/doc/sphinx-guides/source/api/native-api.rst
@@ -682,6 +682,24 @@ The fully expanded example above (without environment variables) looks like this
 
   curl -H X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx https://demo.dataverse.org/api/dataverses/root/guestbookResponses?guestbookId=1 -o myResponses.csv
 
+.. _collection-attributes-api:
+
+Change Collection Attributes
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+.. code-block::
+
+  curl -X PUT -H X-Dataverse-key:$API_TOKEN $SERVER_URL/api/dataverses/$ID/attribute/$ATTRIBUTE?value=$VALUE
+
+The following attributes are supported:
+
+* ``alias`` Collection alias
+* ``name`` Name
+* ``description`` Description
+* ``affiliation`` Affiliation
+* ``filePIDsEnabled`` ("true" or "false") Enables or disables registration of file-level PIDs in datasets within the collection (overriding the instance-wide setting).
+
+
 Datasets
 --------
 
diff --git a/doc/sphinx-guides/source/installation/config.rst b/doc/sphinx-guides/source/installation/config.rst
index 42daa83d654..51e2c9dfd25 100644
--- a/doc/sphinx-guides/source/installation/config.rst
+++ b/doc/sphinx-guides/source/installation/config.rst
@@ -2712,7 +2712,7 @@ If you don't want to register file-based PIDs for your installation, set:
 
 Note: File-level PID registration was added in Dataverse Software 4.9; it could not be disabled until Dataverse Software 4.9.3.
 
-It is possible to override the installation-wide setting for specific collections. For example, it registration of PIDs for files can be enabled in a specific collection when it is disabled instance-wide. Or it can be disabled in specific collections where it is enabled by default. See ... for details.
+It is possible to override the installation-wide setting for specific collections. For example, it registration of PIDs for files can be enabled in a specific collection when it is disabled instance-wide. Or it can be disabled in specific collections where it is enabled by default. See :ref:`collection-attributes-api` for details.
 
 ..
_:IndependentHandleService:

diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java b/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java
index 51b7c403207..06747a9f142 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java
@@ -161,7 +161,7 @@ public class Dataverses extends AbstractApiBean {
 
     @EJB
     SwordServiceBean swordService;
-    
+
     @POST
     @AuthRequired
     public Response addRoot(@Context ContainerRequestContext crc, String body) {
@@ -610,12 +610,22 @@ public Response updateAttribute(@Context ContainerRequestContext crc, @PathParam
                 case "affiliation":
                     collection.setAffiliation(value);
                     break;
+                /* commenting out the code from the draft pr #9462:
+                case "versionPidsConduct":
+                    CollectionConduct conduct = CollectionConduct.findBy(value);
+                    if (conduct == null) {
+                        return badRequest("'" + value + "' is not one of [" +
+                            String.join(",", CollectionConduct.asList()) + "]");
+                    }
+                    collection.setDatasetVersionPidConduct(conduct);
+                    break;
+                */
                 case "filePIDsEnabled":
                     collection.setFilePIDsEnabled(parseBooleanOrDie(value));
                     break;
                 default:
                     return badRequest("'" + attribute + "' is not a supported attribute");
             }
-            
+
             // Off to persistence layer
             execCommand(new UpdateDataverseCommand(collection, null, null, dvRequest, null));

From 0ec9c73a703b3a2ae16f5fca2e63fe6b583c406b Mon Sep 17 00:00:00 2001
From: Leonid Andreev
Date: Tue, 23 May 2023 14:36:31 -0400
Subject: [PATCH 0220/1525] release note for #8889

---
 doc/release-notes/8889-filepids-in-collections.md | 3 +++
 doc/sphinx-guides/source/installation/config.rst  | 2 +-
 2 files changed, 4 insertions(+), 1 deletion(-)
 create mode 100644 doc/release-notes/8889-filepids-in-collections.md

diff --git a/doc/release-notes/8889-filepids-in-collections.md b/doc/release-notes/8889-filepids-in-collections.md
new file mode 100644
index 00000000000..bc8aeea3b56
--- /dev/null
+++ b/doc/release-notes/8889-filepids-in-collections.md
@@ -0,0 +1,3 @@
+It is now possible to configure registering PIDs for files in individual collections.
+
+For example, registration of PIDs for files can be enabled in a specific collection when it is disabled instance-wide. Or it can be disabled in specific collections where it is enabled by default. See the [:FilePIDsEnabled](https://guides.dataverse.org/en/latest/installation/config.html#filepidsenabled) section of the Configuration guide for details.
\ No newline at end of file
diff --git a/doc/sphinx-guides/source/installation/config.rst b/doc/sphinx-guides/source/installation/config.rst
index 51e2c9dfd25..27ddb328b73 100644
--- a/doc/sphinx-guides/source/installation/config.rst
+++ b/doc/sphinx-guides/source/installation/config.rst
@@ -2712,7 +2712,7 @@ If you don't want to register file-based PIDs for your installation, set:
 
 Note: File-level PID registration was added in Dataverse Software 4.9; it could not be disabled until Dataverse Software 4.9.3.
 
-It is possible to override the installation-wide setting for specific collections. For example, it registration of PIDs for files can be enabled in a specific collection when it is disabled instance-wide.
Or it can be disabled in specific collections where it is enabled by default. See :ref:`collection-attributes-api` for details. .. _:IndependentHandleService: From ff34b4666cc2649be476f581b705461dee1ad945 Mon Sep 17 00:00:00 2001 From: Leonid Andreev Date: Tue, 23 May 2023 15:18:55 -0400 Subject: [PATCH 0221/1525] Added the handling for the file pids attribute to the json parser/printer #8889 --- .../java/edu/harvard/iq/dataverse/util/json/JsonParser.java | 4 ++++ .../java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java | 3 +++ 2 files changed, 7 insertions(+) diff --git a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonParser.java b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonParser.java index 4fe9654cc64..59290449988 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonParser.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonParser.java @@ -153,6 +153,10 @@ public Dataverse parseDataverse(JsonObject jobj) throws JsonParseException { } } } + + if (jobj.containsKey("filePIDsEnabled")) { + dv.setFilePIDsEnabled(jobj.getBoolean("filePIDsEnabled")); + } /* We decided that subject is not user set, but gotten from the subject of the dataverse's datasets - leavig this code in for now, in case we need to go back to it at some point diff --git a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java index fd15bb118b0..c72dbee6de3 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java @@ -294,6 +294,9 @@ public static JsonObjectBuilder json(Dataverse dv, Boolean hideEmail) { if(dv.getStorageDriverId() != null) { bld.add("storageDriverLabel", DataAccess.getStorageDriverLabelFor(dv.getStorageDriverId())); } + if (dv.getFilePIDsEnabled() != null) { + bld.add("filePIDsEnabled", dv.getFilePIDsEnabled()); + } return bld; } From 82e91f204e01875ad95af4522ec61c544749f45e Mon Sep 17 00:00:00 2001 From: Leonid Andreev Date: Tue, 23 May 2023 16:32:00 -0400 Subject: [PATCH 0222/1525] a simple restassured test for disabling file pid registration within a collection (#8889) --- .../edu/harvard/iq/dataverse/api/FilesIT.java | 69 +++++++++++++++++++ .../edu/harvard/iq/dataverse/api/UtilIT.java | 6 ++ 2 files changed, 75 insertions(+) diff --git a/src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java b/src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java index ed4d255ab74..8b0afab01d6 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java @@ -2018,4 +2018,73 @@ public void testDeleteFile() { .body("data.files[0]", equalTo(null)) .statusCode(OK.getStatusCode()); } + + @Test + public void testFilePIDsBehavior() { + // Create user + String apiToken = createUserGetToken(); + + // Create Dataverse + String collectionAlias = createDataverseGetAlias(apiToken); + + // Create Initial Dataset with 1 file: + Integer datasetId = createDatasetGetId(collectionAlias, apiToken); + String pathToFile = "scripts/search/data/replace_test/003.txt"; + Response addResponse = UtilIT.uploadFileViaNative(datasetId.toString(), pathToFile, apiToken); + + addResponse.then().assertThat() + .body("data.files[0].dataFile.contentType", equalTo("text/plain")) + .body("data.files[0].label", equalTo("003.txt")) + .statusCode(OK.getStatusCode()); + + Long origFileId = JsonPath.from(addResponse.body().asString()).getLong("data.files[0].dataFile.id"); + + // 
-------------------------
+        // Publish dataverse and dataset
+        // -------------------------
+        msg("Publish dataverse and dataset");
+        Response publishDataversetResp = UtilIT.publishDataverseViaSword(collectionAlias, apiToken);
+        publishDataversetResp.then().assertThat()
+                .statusCode(OK.getStatusCode());
+
+        Response publishDatasetResp = UtilIT.publishDatasetViaNativeApi(datasetId, "major", apiToken);
+        publishDatasetResp.then().assertThat()
+                .statusCode(OK.getStatusCode());
+
+        // The file in this dataset should have been assigned a PID when it was published:
+        Response fileInfoResponse = UtilIT.getFileData(origFileId.toString(), apiToken);
+        fileInfoResponse.then().assertThat().statusCode(OK.getStatusCode());
+        String fileInfoResponseString = fileInfoResponse.body().asString();
+        msg(fileInfoResponseString);
+
+        assertNotNull(JsonPath.from(fileInfoResponseString).getString("data.dataFile.persistentId"));
+
+        // Now change the file PIDs registration configuration for the collection:
+
+        Response changeAttributeResp = UtilIT.setCollectionAttribute(collectionAlias, "filePIDsEnabled", "false", apiToken);
+
+        // ... And do the whole thing with creating another dataset with a file:
+
+        datasetId = createDatasetGetId(collectionAlias, apiToken);
+        addResponse = UtilIT.uploadFileViaNative(datasetId.toString(), pathToFile, apiToken);
+        addResponse.then().assertThat().statusCode(OK.getStatusCode());
+        Long newFileId = JsonPath.from(addResponse.body().asString()).getLong("data.files[0].dataFile.id");
+
+        // And publish this dataset:
+        msg("Publish second dataset");
+
+        publishDatasetResp = UtilIT.publishDatasetViaNativeApi(datasetId, "major", apiToken);
+        publishDatasetResp.then().assertThat()
+                .statusCode(OK.getStatusCode());
+
+        // And confirm that the new file didn't get a PID:
+
+        fileInfoResponse = UtilIT.getFileData(newFileId.toString(), apiToken);
+        fileInfoResponse.then().assertThat().statusCode(OK.getStatusCode());
+        fileInfoResponseString = fileInfoResponse.body().asString();
+        msg(fileInfoResponseString);
+
+        assertNull(JsonPath.from(fileInfoResponseString).getString("data.dataFile.persistentId"));
+
+    }
--- modules/dataverse-parent/pom.xml | 28 ++++++++++++++++++++-------- pom.xml | 2 -- 2 files changed, 20 insertions(+), 10 deletions(-) diff --git a/modules/dataverse-parent/pom.xml b/modules/dataverse-parent/pom.xml index 060fc22b4d2..97e83684d1a 100644 --- a/modules/dataverse-parent/pom.xml +++ b/modules/dataverse-parent/pom.xml @@ -174,19 +174,21 @@ 5.7.0 ${junit.jupiter.version} 2.28.2 + 0.8.10 9.3 - 3.8.1 - 3.2.2 + 3.11.0 + 3.3.0 3.3.2 - 3.2.0 - 3.0.0-M1 - 3.0.0-M5 - 3.0.0-M5 - 3.3.0 - 3.1.2 + 3.5.0 + 3.1.1 + 3.1.0 + 3.1.0 + 3.6.0 + 3.3.1 + 3.2.2 0.42.1 @@ -245,6 +247,11 @@ maven-failsafe-plugin ${maven-failsafe-plugin.version} + + org.apache.maven.plugins + maven-resources-plugin + ${maven-resources-plugin.version} + org.apache.maven.plugins maven-checkstyle-plugin @@ -257,6 +264,11 @@ + + org.jacoco + jacoco-maven-plugin + ${maven-jacoco-plugin.version} + io.fabric8 docker-maven-plugin diff --git a/pom.xml b/pom.xml index 2ba01e5ab61..28fc2b25af5 100644 --- a/pom.xml +++ b/pom.xml @@ -26,7 +26,6 @@ 1.2.18.4 8.5.10 1.20.1 - 0.8.7 5.2.1 2.4.1 5.5.3 @@ -704,7 +703,6 @@ org.jacoco jacoco-maven-plugin - ${jacoco.version} ${basedir}/target/coverage-reports/jacoco-unit.exec ${basedir}/target/coverage-reports/jacoco-unit.exec From cf2088d000e7c475a15243222f42b19be0c76312 Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Tue, 23 May 2023 17:15:56 -0400 Subject: [PATCH 0224/1525] fix S3 direct upload NPE and keep NetCDF metadata extraction #9601 Note that the NcML aux file is not created when S3 direct upload is enabled. --- .../dataverse/ingest/IngestServiceBean.java | 68 ++++++++++++------- 1 file changed, 45 insertions(+), 23 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/ingest/IngestServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/ingest/IngestServiceBean.java index 7cdfda8d082..9d3e7fb1161 100644 --- a/src/main/java/edu/harvard/iq/dataverse/ingest/IngestServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/ingest/IngestServiceBean.java @@ -332,9 +332,6 @@ public List saveAndAddFilesToDataset(DatasetVersion version, } catch (IOException e) { logger.warning("Error getting ingest limit for file: " + dataFile.getIdentifier() + " : " + e.getMessage()); } - if (unattached) { - dataFile.setOwner(null); - } if (savedSuccess && belowLimit) { // These are all brand new files, so they should all have // one filemetadata total. -- L.A. @@ -388,6 +385,9 @@ public List saveAndAddFilesToDataset(DatasetVersion version, dataFile.setContentType(FileUtil.MIME_TYPE_TSV); } } + if (unattached) { + dataFile.setOwner(null); + } // ... and let's delete the main temp file if it exists: if(tempLocationPath!=null) { try { @@ -1294,37 +1294,54 @@ public boolean extractMetadata(String tempFileLocation, DataFile dataFile, Datas * extractable from all files that the NetCDF Java library can open only * some NetCDF files will have a bounding box. * - * Note that if we ever create an API endpoint for this method for files - * that are already persisted to disk or S3, we will need to use something - * like getExistingFile() from extractMetadataNcml() to pull the file down - * from S3 to a temporary file location on local disk so that it can - * (ultimately) be opened by the NetcdfFiles.open() method, which only - * operates on local files (not an input stream). What we have now is not a - * problem for S3 because the files are saved locally before the are - * uploaded to S3. It's during this time that the files are local that this - * method is run. 
+     * Note that while we haven't yet created an API endpoint for this method for
+     * files that are already persisted to disk or S3, the code should work
+     * to download files from S3 as necessary.
      */
     public boolean extractMetadataFromNetcdf(String tempFileLocation, DataFile dataFile, DatasetVersion editVersion) throws IOException {
         boolean ingestSuccessful = false;
 
-        InputStream tempFileInputStream = null;
-        if (tempFileLocation == null) {
-            StorageIO<DataFile> sio = dataFile.getStorageIO();
-            sio.open(DataAccessOption.READ_ACCESS);
-            tempFileInputStream = sio.getInputStream();
+        String dataFileLocation = null;
+        if (tempFileLocation != null) {
+            logger.info("tempFileLocation is non null. Setting dataFileLocation to " + tempFileLocation);
+            dataFileLocation = tempFileLocation;
         } else {
+            logger.info("tempFileLocation is null. Perhaps the file is already on disk or S3 direct upload is enabled.");
+            File tempFile = null;
+            File localFile;
+            StorageIO<DataFile> storageIO;
             try {
-                tempFileInputStream = new FileInputStream(new File(tempFileLocation));
-            } catch (FileNotFoundException notfoundEx) {
-                throw new IOException("Could not open temp file " + tempFileLocation);
+                storageIO = dataFile.getStorageIO();
+                storageIO.open();
+                if (storageIO.isLocalFile()) {
+                    localFile = storageIO.getFileSystemPath().toFile();
+                    dataFileLocation = localFile.getAbsolutePath();
+                    logger.info("extractMetadataFromNetcdf: file is local. Path: " + dataFileLocation);
+                } else {
+                    // Need to create a temporary local file:
+                    tempFile = File.createTempFile("tempFileExtractMetadataNetcdf", ".tmp");
+                    try ( ReadableByteChannel targetFileChannel = (ReadableByteChannel) storageIO.getReadChannel(); FileChannel tempFileChannel = new FileOutputStream(tempFile).getChannel();) {
+                        tempFileChannel.transferFrom(targetFileChannel, 0, storageIO.getSize());
+                    }
+                    dataFileLocation = tempFile.getAbsolutePath();
+                    logger.info("extractMetadataFromNetcdf: file is on S3. Downloaded and saved to temp path: " + dataFileLocation);
+                }
+            } catch (IOException ex) {
+                logger.info("extractMetadataFromNetcdf, could not use storageIO for data file id " + dataFile.getId() + ". Exception: " + ex);
+                return false;
             }
         }
 
+        if (dataFileLocation == null) {
+            logger.fine("after all that dataFileLocation is still null! Returning early.");
+            return false;
+        }
+
         // Locate metadata extraction plugin for the file format by looking
         // it up with the Ingest Service Provider Registry:
         NetcdfFileMetadataExtractor extractorPlugin = new NetcdfFileMetadataExtractor();
-        logger.fine("creating file from " + tempFileLocation);
-        File file = new File(tempFileLocation);
+        logger.info("creating file from " + dataFileLocation);
+        File file = new File(dataFileLocation);
         FileMetadataIngest extractedMetadata = extractorPlugin.ingestFile(file);
         Map<String, Set<String>> extractedMetadataMap = extractedMetadata.getMetadataMap();
@@ -1361,9 +1378,11 @@ public boolean extractMetadataNcml(DataFile dataFile, Path tempLocationPath) {
         InputStream inputStream = null;
         String dataFileLocation = null;
         if (tempLocationPath != null) {
+            logger.info("extractMetadataNcml: tempLocationPath is non null. Setting dataFileLocation to " + tempLocationPath);
             // This file was just uploaded and hasn't been saved to S3 or local storage.
             dataFileLocation = tempLocationPath.toString();
         } else {
+            logger.info("extractMetadataNcml: tempLocationPath null. 
Calling getExistingFile for dataFileLocation."); dataFileLocation = getExistingFile(dataFile, dataFileLocation); } if (dataFileLocation != null) { @@ -1425,7 +1444,7 @@ private boolean isNcmlFileCreated(final NetcdfFile netcdfFile, Path tempLocation } private String getExistingFile(DataFile dataFile, String dataFileLocation) { - // This file is already on S3 or local storage. + // This file is already on S3 (non direct upload) or local storage. File tempFile = null; File localFile; StorageIO storageIO; @@ -1436,6 +1455,7 @@ private String getExistingFile(DataFile dataFile, String dataFileLocation) { localFile = storageIO.getFileSystemPath().toFile(); dataFileLocation = localFile.getAbsolutePath(); logger.fine("extractMetadataNcml: file is local. Path: " + dataFileLocation); + logger.info("getExistingFile: file is local. Path: " + dataFileLocation); } else { // Need to create a temporary local file: tempFile = File.createTempFile("tempFileExtractMetadataNcml", ".tmp"); @@ -1444,9 +1464,11 @@ private String getExistingFile(DataFile dataFile, String dataFileLocation) { } dataFileLocation = tempFile.getAbsolutePath(); logger.fine("extractMetadataNcml: file is on S3. Downloaded and saved to temp path: " + dataFileLocation); + logger.info("getExistingFile: file is on S3. Downloaded and saved to temp path: " + dataFileLocation); } } catch (IOException ex) { logger.info("While attempting to extract NcML, could not use storageIO for data file id " + dataFile.getId() + ". Exception: " + ex); + logger.info("getExistingFile: While attempting to extract NcML, could not use storageIO for data file id " + dataFile.getId() + ". Exception: " + ex); } return dataFileLocation; } From f112fe46e93133e7736bfcb018623b07eebee776 Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Wed, 24 May 2023 00:05:01 +0200 Subject: [PATCH 0225/1525] build,test: reconfigure JaCoCo test coverage measuring With the addition of integration tests we need to create a combined report of unit tests and integration tests (for now still keeping API tests out of the loop for normal cases). This commit reconfigures the JaCoCo plugin with multiple executions at certain times to execute the measurements and merge the results together. 
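A quick local sanity check of the new setup (a sketch; the merged report
location is the one configured below):

    mvn clean verify
    # then open target/site/jacoco-merged-test-coverage-report/index.html in a browser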
--- pom.xml | 72 ++++++++++++++++++++++++++++++++++++++++++++++++++++----- 1 file changed, 66 insertions(+), 6 deletions(-) diff --git a/pom.xml b/pom.xml index 28fc2b25af5..5d1523e01b8 100644 --- a/pom.xml +++ b/pom.xml @@ -703,23 +703,78 @@ org.jacoco jacoco-maven-plugin - - ${basedir}/target/coverage-reports/jacoco-unit.exec - ${basedir}/target/coverage-reports/jacoco-unit.exec - jacoco-initialize prepare-agent + + ${project.build.directory}/coverage-reports/jacoco-unit.exec + surefire.jacoco.args + - jacoco-site - package + jacoco-after-unit + test report + + ${project.build.directory}/coverage-reports/jacoco-unit.exec + ${project.reporting.outputDirectory}/jacoco-unit-test-coverage-report + + + + jacoco-initialize-it + pre-integration-test + + prepare-agent + + + ${project.build.directory}/coverage-reports/jacoco-integration.exec + failsafe.jacoco.args + + + + jacoco-after-it + post-integration-test + + report + + + ${project.build.directory}/coverage-reports/jacoco-integration.exec + ${project.reporting.outputDirectory}/jacoco-integration-test-coverage-report + + + + jacoco-merge-unit-and-it + post-integration-test + + merge + + + + + ${project.build.directory}/coverage-reports/ + + *.exec + + + + ${project.build.directory}/coverage-reports/merged.exec + + + + jacoco-report + post-integration-test + + report + + + ${project.build.directory}/coverage-reports/merged.exec + ${project.reporting.outputDirectory}/jacoco-merged-test-coverage-report + @@ -734,6 +789,9 @@ 2.3.1 + + ${project.reporting.outputDirectory}/jacoco-merged-test-coverage-report/jacoco.xml + org.apache.maven.plugins @@ -743,6 +801,7 @@ ${testsToExclude} ${skipUnitTests} + ${surefire.jacoco.args} ${argLine} @@ -751,6 +810,7 @@ maven-failsafe-plugin testcontainers + ${failsafe.jacoco.args} ${argLine} From 5975e268ef1150ef76edd589e1ce35a74e683ccf Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Wed, 24 May 2023 00:23:54 +0200 Subject: [PATCH 0226/1525] ci: restructure Maven tests to include integration tests --- .github/workflows/maven_unit_test.yml | 78 +++++++++++++++++++++++++-- 1 file changed, 75 insertions(+), 3 deletions(-) diff --git a/.github/workflows/maven_unit_test.yml b/.github/workflows/maven_unit_test.yml index 629a45a6c2c..8d51702be6b 100644 --- a/.github/workflows/maven_unit_test.yml +++ b/.github/workflows/maven_unit_test.yml @@ -33,6 +33,7 @@ jobs: continue-on-error: ${{ matrix.experimental }} runs-on: ubuntu-latest steps: + # Basic setup chores - uses: actions/checkout@v2 - name: Set up JDK ${{ matrix.jdk }} uses: actions/setup-java@v2 @@ -45,13 +46,84 @@ jobs: path: ~/.m2 key: ${{ runner.os }}-m2-${{ hashFiles('**/pom.xml') }} restore-keys: ${{ runner.os }}-m2 + - name: Build with Maven run: mvn -DcompilerArgument=-Xlint:unchecked -Dtarget.java.version=${{ matrix.jdk }} -P all-unit-tests clean test - - name: Maven Code Coverage - env: + + # Store the build for the next step (integration test) to avoid recompilation and to transfer coverage reports + - run: tar -cvf java-builddir.tar target + - uses: actions/upload-artifact@v3 + with: + name: java-builddir + path: java-builddir.tar + retention-days: 3 + integration-test: + runs-on: ubuntu-latest + needs: unittest + steps: + # Basic setup chores + - uses: actions/checkout@v3 + - name: Set up JDK ${{ matrix.jdk }} + uses: actions/setup-java@v2 + with: + java-version: ${{ matrix.jdk }} + distribution: 'adopt' + - name: Cache Maven packages + uses: actions/cache@v2 + with: + path: ~/.m2 + key: ${{ runner.os }}-m2-${{ hashFiles('**/pom.xml') }} + 
restore-keys: ${{ runner.os }}-m2 + + # Get the build output from the unit test job + - uses: actions/download-artifact@v3 + with: + name: java-builddir + - run: tar -xvf java-builddir.tar; ls -laR target + + # Run integration tests + - run: mvn -Dtarget.java.version=${{ matrix.jdk }} verify + + # Wrap up and send to coverage job + - run: tar -cvf java-builddir.tar target + - uses: actions/upload-artifact@v3 + with: + name: java-builddir + path: java-builddir.tar + retention-days: 3 + coverage-report: + runs-on: ubuntu-latest + needs: integration-test + steps: + # Basic setup chores + - uses: actions/checkout@v3 + - name: Set up JDK ${{ matrix.jdk }} + uses: actions/setup-java@v2 + with: + java-version: ${{ matrix.jdk }} + distribution: 'adopt' + - name: Cache Maven packages + uses: actions/cache@v2 + with: + path: ~/.m2 + key: ${{ runner.os }}-m2-${{ hashFiles('**/pom.xml') }} + restore-keys: ${{ runner.os }}-m2 + + # Get the build output from the integration test job + - uses: actions/download-artifact@v3 + with: + name: java-builddir + - run: tar -xvf java-builddir.tar; ls -laR target + + # Deposit Code Coverage + - name: Maven Code Coverage + env: CI_NAME: github COVERALLS_SECRET: ${{ secrets.GITHUB_TOKEN }} - run: mvn -V -B jacoco:report coveralls:report -DrepoToken=${COVERALLS_SECRET} -DpullRequest=${{ github.event.number }} + run: mvn -V -B coveralls:report -DrepoToken=${COVERALLS_SECRET} -DpullRequest=${{ github.event.number }} + + # NOTE: this may be extended with adding a report to the build output, leave a comment, send to Sonarcloud, ... + push-app-img: name: Publish App Image permissions: From 3b7aa106d118f7cecaefd7680b7a6dd7df2fc670 Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Wed, 24 May 2023 00:29:47 +0200 Subject: [PATCH 0227/1525] chore(ci): update actions/setup-java to v3 for Maven workflow --- .github/workflows/maven_unit_test.yml | 11 +++++------ 1 file changed, 5 insertions(+), 6 deletions(-) diff --git a/.github/workflows/maven_unit_test.yml b/.github/workflows/maven_unit_test.yml index 8d51702be6b..1ff08705e36 100644 --- a/.github/workflows/maven_unit_test.yml +++ b/.github/workflows/maven_unit_test.yml @@ -34,9 +34,9 @@ jobs: runs-on: ubuntu-latest steps: # Basic setup chores - - uses: actions/checkout@v2 + - uses: actions/checkout@v3 - name: Set up JDK ${{ matrix.jdk }} - uses: actions/setup-java@v2 + uses: actions/setup-java@v3 with: java-version: ${{ matrix.jdk }} distribution: 'adopt' @@ -64,7 +64,7 @@ jobs: # Basic setup chores - uses: actions/checkout@v3 - name: Set up JDK ${{ matrix.jdk }} - uses: actions/setup-java@v2 + uses: actions/setup-java@v3 with: java-version: ${{ matrix.jdk }} distribution: 'adopt' @@ -97,10 +97,9 @@ jobs: steps: # Basic setup chores - uses: actions/checkout@v3 - - name: Set up JDK ${{ matrix.jdk }} - uses: actions/setup-java@v2 + - uses: actions/setup-java@v3 with: - java-version: ${{ matrix.jdk }} + java-version: '11' distribution: 'adopt' - name: Cache Maven packages uses: actions/cache@v2 From 2c0a6aabf5852a441009aca39625e8a71eed66b9 Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Wed, 24 May 2023 00:30:13 +0200 Subject: [PATCH 0228/1525] ci(test): fix missing build matrix for integration test job --- .github/workflows/maven_unit_test.yml | 17 +++++++++++++++++ 1 file changed, 17 insertions(+) diff --git a/.github/workflows/maven_unit_test.yml b/.github/workflows/maven_unit_test.yml index 1ff08705e36..3c1c5e7e3e2 100644 --- a/.github/workflows/maven_unit_test.yml +++ b/.github/workflows/maven_unit_test.yml @@ -60,6 
+60,23 @@ jobs: integration-test: runs-on: ubuntu-latest needs: unittest + name: (${{ matrix.status}} / JDK ${{ matrix.jdk }}) Integration Tests + strategy: + fail-fast: false + matrix: + jdk: [ '11' ] + experimental: [ false ] + status: [ "Stable" ] + # + # JDK 17 builds disabled due to non-essential fails marking CI jobs as completely failed within + # Github Projects, PR lists etc. This was consensus on Slack #dv-tech. See issue #8094 + # (This is a limitation of how Github is currently handling these things.) + # + #include: + # - jdk: '17' + # experimental: true + # status: "Experimental" + continue-on-error: ${{ matrix.experimental }} steps: # Basic setup chores - uses: actions/checkout@v3 From 8a6f23166870c9460c635535cb021029d45509f5 Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Wed, 24 May 2023 00:43:36 +0200 Subject: [PATCH 0229/1525] ci(test): tweak artifact upload and job names --- .github/workflows/maven_unit_test.yml | 19 ++++++++++--------- 1 file changed, 10 insertions(+), 9 deletions(-) diff --git a/.github/workflows/maven_unit_test.yml b/.github/workflows/maven_unit_test.yml index 3c1c5e7e3e2..a4753f24668 100644 --- a/.github/workflows/maven_unit_test.yml +++ b/.github/workflows/maven_unit_test.yml @@ -1,4 +1,4 @@ -name: Maven Unit Tests +name: Maven Tests on: push: @@ -96,21 +96,22 @@ jobs: - uses: actions/download-artifact@v3 with: name: java-builddir - - run: tar -xvf java-builddir.tar; ls -laR target + - run: tar -xvf java-builddir.tar - # Run integration tests - - run: mvn -Dtarget.java.version=${{ matrix.jdk }} verify + # Run integration tests (but not unit tests again) + - run: mvn -DskipUnitTests -Dtarget.java.version=${{ matrix.jdk }} verify # Wrap up and send to coverage job - - run: tar -cvf java-builddir.tar target + - run: tar -cvf java-reportdir.tar target/site - uses: actions/upload-artifact@v3 with: - name: java-builddir - path: java-builddir.tar + name: java-reportdir + path: java-reportdir.tar retention-days: 3 coverage-report: runs-on: ubuntu-latest needs: integration-test + name: Coverage Report Submission steps: # Basic setup chores - uses: actions/checkout@v3 @@ -128,8 +129,8 @@ jobs: # Get the build output from the integration test job - uses: actions/download-artifact@v3 with: - name: java-builddir - - run: tar -xvf java-builddir.tar; ls -laR target + name: java-reportdir + - run: tar -xvf java-reportdir.tar # Deposit Code Coverage - name: Maven Code Coverage From edef7d5bd52f3625b2e15521046d97a73f999894 Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Wed, 24 May 2023 01:20:04 +0200 Subject: [PATCH 0230/1525] docs(config): add OIDC to JVM options list with backlink --- doc/sphinx-guides/source/installation/config.rst | 7 +++++++ doc/sphinx-guides/source/installation/oidc.rst | 2 ++ 2 files changed, 9 insertions(+) diff --git a/doc/sphinx-guides/source/installation/config.rst b/doc/sphinx-guides/source/installation/config.rst index f8aef8c59da..4b1a8bd14b3 100644 --- a/doc/sphinx-guides/source/installation/config.rst +++ b/doc/sphinx-guides/source/installation/config.rst @@ -2420,6 +2420,13 @@ Can also be set via any `supported MicroProfile Config API source`_, e.g. the en ``DATAVERSE_UI_SHOW_VALIDITY_FILTER``. Will accept ``[tT][rR][uU][eE]|1|[oO][nN]`` as "true" expressions. +dataverse.auth.oidc.* ++++++++++++++++++++++ + +Provision a single :doc:`OpenID Connect authentication provider ` using MicroProfile Config. You can find a list of +all available options at :ref:`oidc-mpconfig`. + + .. 
_feature-flags: Feature Flags diff --git a/doc/sphinx-guides/source/installation/oidc.rst b/doc/sphinx-guides/source/installation/oidc.rst index 9848d73b189..0dc5ca4ff4a 100644 --- a/doc/sphinx-guides/source/installation/oidc.rst +++ b/doc/sphinx-guides/source/installation/oidc.rst @@ -116,6 +116,8 @@ The Dataverse installation will automatically try to load the provider and retri You should see the new provider under "Other options" on the Log In page, as described in the :doc:`/user/account` section of the User Guide. +.. _oidc-mpconfig: + Provision via MPCONFIG ^^^^^^^^^^^^^^^^^^^^^^ From e91a046a790506e959ffcc9f71d27f6207b4d4e1 Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Wed, 24 May 2023 01:51:04 +0200 Subject: [PATCH 0231/1525] feat(oidc): replace map-based cache of PKCE verifiers Instead of using an unlimited growth Map of verifiers as a cache, we now will use a real evicting cache implementation to limit the size and age of entries. This will limit resource waste and mitigate an unlikely but present attack vector of pumping up the cache to DDoS us. --- pom.xml | 7 +++++++ .../providers/oauth2/oidc/OIDCAuthProvider.java | 14 ++++++++++++-- .../harvard/iq/dataverse/settings/JvmSettings.java | 2 ++ .../META-INF/microprofile-config.properties | 4 ++++ 4 files changed, 25 insertions(+), 2 deletions(-) diff --git a/pom.xml b/pom.xml index 5d1523e01b8..0484a80ddd9 100644 --- a/pom.xml +++ b/pom.xml @@ -386,6 +386,13 @@ oauth2-oidc-sdk 10.9.1 + + + com.github.ben-manes.caffeine + caffeine + 3.1.6 + + io.gdcc diff --git a/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/oidc/OIDCAuthProvider.java b/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/oidc/OIDCAuthProvider.java index 52362f7abeb..818332ea282 100644 --- a/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/oidc/OIDCAuthProvider.java +++ b/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/oidc/OIDCAuthProvider.java @@ -1,5 +1,7 @@ package edu.harvard.iq.dataverse.authorization.providers.oauth2.oidc; +import com.github.benmanes.caffeine.cache.Cache; +import com.github.benmanes.caffeine.cache.Caffeine; import com.github.scribejava.core.builder.api.DefaultApi20; import com.nimbusds.oauth2.sdk.AuthorizationCode; import com.nimbusds.oauth2.sdk.AuthorizationCodeGrant; @@ -36,10 +38,13 @@ import edu.harvard.iq.dataverse.authorization.providers.oauth2.AbstractOAuth2AuthenticationProvider; import edu.harvard.iq.dataverse.authorization.providers.oauth2.OAuth2Exception; import edu.harvard.iq.dataverse.authorization.providers.oauth2.OAuth2UserRecord; +import edu.harvard.iq.dataverse.settings.JvmSettings; import edu.harvard.iq.dataverse.util.BundleUtil; import java.io.IOException; import java.net.URI; +import java.time.Duration; +import java.time.temporal.ChronoUnit; import java.util.Arrays; import java.util.List; import java.util.Map; @@ -69,8 +74,13 @@ public class OIDCAuthProvider extends AbstractOAuth2AuthenticationProvider { /** * Using PKCE, we create and send a special {@link CodeVerifier}. This contains a secret * we need again when verifying the response by the provider, thus the cache. + * To be absolutely sure this may not be abused to DDoS us and not let unused verifiers rot, + * use an evicting cache implementation and not a standard map. 
*/ - private final Map verifierCache = new ConcurrentHashMap<>(); + private final Cache verifierCache = Caffeine.newBuilder() + .maximumSize(JvmSettings.OIDC_PKCE_CACHE_MAXSIZE.lookup(Integer.class)) + .expireAfterWrite(Duration.of(JvmSettings.OIDC_PKCE_CACHE_MAXAGE.lookup(Integer.class), ChronoUnit.SECONDS)) + .build(); public OIDCAuthProvider(String aClientId, String aClientSecret, String issuerEndpointURL, boolean pkceEnabled, String pkceMethod) throws AuthorizationSetupException { @@ -201,7 +211,7 @@ public String buildAuthzUrl(String state, String callbackUrl) { public OAuth2UserRecord getUserRecord(String code, String state, String redirectUrl) throws IOException, OAuth2Exception { // Retrieve the verifier from the cache and clear from the cache. If not found, will be null. // Will be sent to token endpoint for verification, so if required but missing, will lead to exception. - CodeVerifier verifier = verifierCache.remove(state); + CodeVerifier verifier = verifierCache.getIfPresent(state); // Create grant object - again, this is null-safe for the verifier AuthorizationGrant codeGrant = new AuthorizationCodeGrant( diff --git a/src/main/java/edu/harvard/iq/dataverse/settings/JvmSettings.java b/src/main/java/edu/harvard/iq/dataverse/settings/JvmSettings.java index 144be72c91a..1122b64c139 100644 --- a/src/main/java/edu/harvard/iq/dataverse/settings/JvmSettings.java +++ b/src/main/java/edu/harvard/iq/dataverse/settings/JvmSettings.java @@ -125,6 +125,8 @@ public enum JvmSettings { SCOPE_OIDC_PKCE(SCOPE_OIDC, "pkce"), OIDC_PKCE_ENABLED(SCOPE_OIDC_PKCE, "enabled"), OIDC_PKCE_METHOD(SCOPE_OIDC_PKCE, "method"), + OIDC_PKCE_CACHE_MAXSIZE(SCOPE_OIDC_PKCE, "max-cache-size"), + OIDC_PKCE_CACHE_MAXAGE(SCOPE_OIDC_PKCE, "max-cache-age"), // UI SETTINGS SCOPE_UI(PREFIX, "ui"), diff --git a/src/main/resources/META-INF/microprofile-config.properties b/src/main/resources/META-INF/microprofile-config.properties index 3e166d0527f..38a4d8df0ab 100644 --- a/src/main/resources/META-INF/microprofile-config.properties +++ b/src/main/resources/META-INF/microprofile-config.properties @@ -54,3 +54,7 @@ dataverse.pid.datacite.rest-api-url=https://api.test.datacite.org # Handle.Net dataverse.pid.handlenet.index=300 + +# AUTHENTICATION +dataverse.auth.oidc.pkce.max-cache-size=10000 +dataverse.auth.oidc.pkce.max-cache-age=300 \ No newline at end of file From 8d4a75e8236298d787fdf738512c615c13c3654e Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Wed, 24 May 2023 01:51:39 +0200 Subject: [PATCH 0232/1525] docs(oidc): describe new config options for PKCE verifier cache --- doc/sphinx-guides/source/installation/oidc.rst | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/doc/sphinx-guides/source/installation/oidc.rst b/doc/sphinx-guides/source/installation/oidc.rst index 0dc5ca4ff4a..e036e9c8470 100644 --- a/doc/sphinx-guides/source/installation/oidc.rst +++ b/doc/sphinx-guides/source/installation/oidc.rst @@ -168,3 +168,12 @@ The following options are available: - A subtitle, currently not displayed by the UI. - N - ``OpenID Connect`` + * - ``dataverse.auth.oidc.pkce.max-cache-size`` + - Tune the maximum size of all OIDC providers' verifier cache (= number of outstanding PKCE-enabled auth responses). + - N + - 10000 + * - ``dataverse.auth.oidc.pkce.max-cache-age`` + - Tune the maximum age of all OIDC providers' verifier cache entries. Default is 5 minutes, equivalent to lifetime + of many OIDC access tokens. 
+ - N + - 300 \ No newline at end of file
From 8ca25fff4f4e1801c97ff28ba1947e4d05bfc915 Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Wed, 24 May 2023 01:56:47 +0200 Subject: [PATCH 0233/1525] docs(oidc,test): add release note --- .../9268-8349-oidc-improvements.md | 28 +++++++++++++++++++ 1 file changed, 28 insertions(+) create mode 100644 doc/release-notes/9268-8349-oidc-improvements.md

diff --git a/doc/release-notes/9268-8349-oidc-improvements.md b/doc/release-notes/9268-8349-oidc-improvements.md new file mode 100644 index 00000000000..cb0a9685c69 --- /dev/null +++ b/doc/release-notes/9268-8349-oidc-improvements.md @@ -0,0 +1,28 @@ +## OpenID Connect Authentication Provider Improvements + +### Using MicroProfile Config For Provisioning + +With this release, it is possible to provision a single OIDC-based authentication provider +by using MicroProfile Config instead of, or in addition to, the classic Admin API provisioning. + +If you are using an external OIDC provider component as an identity management system and/or a broker +to other authentication providers such as Google, eduGAIN SAML, and so on, this might make your +life easier during instance setup and reconfiguration. You no longer need to generate the +necessary JSON file. + +### Adding PKCE Support + +Some OIDC providers require PKCE as an additional security layer. As of this version, you can enable +support for it on any OIDC provider you configure. (Note that OAuth2 providers have not been upgraded.) + +## Improved Testing + +With this release, we add a new type of testing to Dataverse: integration tests that are not end-to-end tests +like our API tests. Starting with OIDC authentication support, we now regularly verify on CI that both OIDC +login options work, in the UI and via the API. + +The testing and development Keycloak realm has been updated with more users and compatibility with Keycloak 21. + +The support for setting JVM options during testing has been improved for developers. You may now add the +`@JvmSetting` annotation to classes (including inner classes) and reference factory methods for values. This improvement +also paves the way for manipulating JVM options during end-to-end tests on remote ends. 
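As an illustration of the `@JvmSetting` usage mentioned in this note, a hypothetical JUnit 5 test could look like the sketch below (the class name, chosen setting keys, and the exact annotation attribute for factory methods are assumptions for illustration, not verbatim from the test helpers):

    import edu.harvard.iq.dataverse.settings.JvmSettings;
    import edu.harvard.iq.dataverse.util.testing.JvmSetting;
    import org.junit.jupiter.api.Test;

    // Class-level annotation: every test in this class (and its inner classes)
    // runs with dataverse.siteUrl set to the given value.
    @JvmSetting(key = JvmSettings.SITE_URL, value = "https://demo.example.org")
    class SettingsAwareExampleTest {

        @Test
        // Method-level annotation: the value is produced by a static factory
        // method referenced by name instead of a compile-time constant.
        @JvmSetting(key = JvmSettings.FQDN, method = "testFqdn")
        void usesFactoryProvidedValue() {
            // ... exercise code that looks up JvmSettings.FQDN here ...
        }

        static String testFqdn() {
            return "dv.example.org";
        }
    }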
From 7319ae6d16bfb3fbb886285f58a9f28ba324e4c5 Mon Sep 17 00:00:00 2001 From: Eryk Kulikowski Date: Wed, 24 May 2023 12:59:30 +0200 Subject: [PATCH 0234/1525] mutch faster reindexing of datasets by reducing the number of needed queries --- .../iq/dataverse/DatasetServiceBean.java | 1 + .../iq/dataverse/search/IndexServiceBean.java | 39 ++++++++++++------- 2 files changed, 27 insertions(+), 13 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DatasetServiceBean.java index 30dd165e22f..f9dcaad1373 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetServiceBean.java @@ -116,6 +116,7 @@ public Dataset findDeep(Object pk) { .setHint("eclipselink.left-join-fetch", "o.files.ingestReports") .setHint("eclipselink.left-join-fetch", "o.files.dataFileTags") .setHint("eclipselink.left-join-fetch", "o.files.fileMetadatas") + .setHint("eclipselink.left-join-fetch", "o.files.fileMetadatas.fileCategories") .setHint("eclipselink.left-join-fetch", "o.files.guestbookResponses") .setHint("eclipselink.left-join-fetch", "o.files.embargo") .setHint("eclipselink.left-join-fetch", "o.files.fileAccessRequests") diff --git a/src/main/java/edu/harvard/iq/dataverse/search/IndexServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/search/IndexServiceBean.java index 23e1f4a1a99..22471cf0105 100644 --- a/src/main/java/edu/harvard/iq/dataverse/search/IndexServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/search/IndexServiceBean.java @@ -243,7 +243,7 @@ public Future indexDataverse(Dataverse dataverse, boolean processPaths) solrInputDocument.addField(SearchFields.SOURCE, HARVESTED); } else { (this means that all dataverses are "local" - should this be removed? */ solrInputDocument.addField(SearchFields.IS_HARVESTED, false); - solrInputDocument.addField(SearchFields.METADATA_SOURCE, findRootDataverseCached().getName()); //rootDataverseName); + solrInputDocument.addField(SearchFields.METADATA_SOURCE, rootDataverse.getName()); //rootDataverseName); /*}*/ addDataverseReleaseDateToSolrDoc(solrInputDocument, dataverse); @@ -437,7 +437,8 @@ public void indexDvObject(DvObject objectIn) throws SolrServerException, IOExce } private void indexDataset(Dataset dataset, boolean doNormalSolrDocCleanUp) throws SolrServerException, IOException { - doIndexDataset(dataset, doNormalSolrDocCleanUp); + Dataset deep = datasetService.findDeep(dataset.getId()); + doIndexDataset(deep, doNormalSolrDocCleanUp); updateLastIndexedTime(dataset.getId()); } @@ -817,10 +818,15 @@ public SolrInputDocuments toSolrDocs(IndexableDataset indexableDataset, Set paths = object.isInstanceofDataset() ? retrieveDVOPaths(datasetService.find(object.getId())) + Dataset dataset = null; + if (object.isInstanceofDataset()) { + dataset = datasetService.findDeep(object.getId()); + } + List paths = object.isInstanceofDataset() ? 
retrieveDVOPaths(dataset) : retrieveDVOPaths(dataverseService.find(object.getId())); sid.removeField(SearchFields.SUBTREE); @@ -1694,7 +1707,7 @@ private void updatePathForExistingSolrDocs(DvObject object) throws SolrServerExc UpdateResponse addResponse = solrClientService.getSolrClient().add(sid); UpdateResponse commitResponse = solrClientService.getSolrClient().commit(); if (object.isInstanceofDataset()) { - for (DataFile df : datasetService.find(object.getId()).getFiles()) { + for (DataFile df : dataset.getFiles()) { solrQuery.setQuery(SearchUtil.constructQuery(SearchFields.ENTITY_ID, df.getId().toString())); res = solrClientService.getSolrClient().query(solrQuery); if (!res.getResults().isEmpty()) { From 8f8373859236e18b97612cc3330c4614ff9303f9 Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Wed, 24 May 2023 09:40:20 -0400 Subject: [PATCH 0235/1525] set expectations about S3 direct upload behavior #9601 --- .../source/developers/big-data-support.rst | 11 +++++++++++ doc/sphinx-guides/source/user/dataset-management.rst | 4 ++++ 2 files changed, 15 insertions(+) diff --git a/doc/sphinx-guides/source/developers/big-data-support.rst b/doc/sphinx-guides/source/developers/big-data-support.rst index 734f678ceb9..4d409b407f7 100644 --- a/doc/sphinx-guides/source/developers/big-data-support.rst +++ b/doc/sphinx-guides/source/developers/big-data-support.rst @@ -36,6 +36,17 @@ At present, one potential drawback for direct-upload is that files are only part ``./asadmin create-jvm-options "-Ddataverse.files..ingestsizelimit="`` +.. _s3-direct-upload-features-disabled: + +Features that are Disabled if S3 Direct Upload is Enabled +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +The following features are disabled when S3 direct upload is enabled. + +- Unzipping of zip files. (See :ref:`compressed-files`.) +- Extraction of metadata from FITS files. (See :ref:`fits`.) +- Creation of NcML auxiliary files (See :ref:`netcdf-and-hdf5`.) + .. _cors-s3-bucket: Allow CORS for S3 Buckets diff --git a/doc/sphinx-guides/source/user/dataset-management.rst b/doc/sphinx-guides/source/user/dataset-management.rst index 9223768b49f..bf6e118cc21 100755 --- a/doc/sphinx-guides/source/user/dataset-management.rst +++ b/doc/sphinx-guides/source/user/dataset-management.rst @@ -329,6 +329,8 @@ You can also search for files within datasets that have been tagged as "Workflow |cw-image6| +.. _fits: + Astronomy (FITS) ---------------- @@ -374,6 +376,8 @@ Please note the following rules regarding these fields: If the bounding box was successfully populated, :ref:`geospatial-search` should be able to find it. +.. _compressed-files: + Compressed Files ---------------- From 60c1031cc10e3f8dcf4250853616f74f1f6c1464 Mon Sep 17 00:00:00 2001 From: Leonid Andreev Date: Wed, 24 May 2023 10:27:32 -0400 Subject: [PATCH 0236/1525] a couple of extra fixes. 
#8889 --- src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java | 3 ++- src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java | 2 +- 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java b/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java index 06747a9f142..9a09a298ac5 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java @@ -588,7 +588,7 @@ public Response deleteDataverse(@Context ContainerRequestContext crc, @PathParam @AuthRequired @Path("{identifier}/attribute/{attribute}") public Response updateAttribute(@Context ContainerRequestContext crc, @PathParam("identifier") String identifier, - @PathParam("attribute") String attribute, @NotNull String value) { + @PathParam("attribute") String attribute, @QueryParam("value") String value) { try { Dataverse collection = findDataverseOrDie(identifier); User user = getRequestUser(crc); @@ -622,6 +622,7 @@ public Response updateAttribute(@Context ContainerRequestContext crc, @PathParam */ case "filePIDsEnabled": collection.setFilePIDsEnabled(parseBooleanOrDie(value)); + break; default: return badRequest("'" + attribute + "' is not a supported attribute"); } diff --git a/src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java b/src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java index 8b0afab01d6..476cf16854c 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java @@ -2079,7 +2079,7 @@ public void testFilePIDsBehavior() { // And confirm that the file didn't get a PID: - fileInfoResponse = UtilIT.getFileData(origFileId.toString(), apiToken); + fileInfoResponse = UtilIT.getFileData(newFileId.toString(), apiToken); fileInfoResponse.then().assertThat().statusCode(OK.getStatusCode()); fileInfoResponseString = fileInfoResponse.body().asString(); msg(fileInfoResponseString); From 14ce4466f8ea1ff8a2f97a78d554fb708efbc769 Mon Sep 17 00:00:00 2001 From: Leonid Andreev Date: Wed, 24 May 2023 10:37:03 -0400 Subject: [PATCH 0237/1525] cosmetic. #8889 --- src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java b/src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java index 476cf16854c..f4f88cdbc85 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java @@ -2043,8 +2043,8 @@ public void testFilePIDsBehavior() { // Publish dataverse and dataset // ------------------------- msg("Publish dataverse and dataset"); - Response publishDataversetResp = UtilIT.publishDataverseViaSword(collectionAlias, apiToken); - publishDataversetResp.then().assertThat() + Response publishCollectionResp = UtilIT.publishDataverseViaSword(collectionAlias, apiToken); + publishCollectionResp.then().assertThat() .statusCode(OK.getStatusCode()); Response publishDatasetResp = UtilIT.publishDatasetViaNativeApi(datasetId, "major", apiToken); From 78d68e2b3c4fdf7dde5ac1c529643a44136f135e Mon Sep 17 00:00:00 2001 From: Leonid Andreev Date: Wed, 24 May 2023 13:18:31 -0400 Subject: [PATCH 0238/1525] A bug in the restassured test. 
#8889 --- src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java b/src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java index f4f88cdbc85..0b9ae8a77d4 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java @@ -2084,7 +2084,7 @@ public void testFilePIDsBehavior() { fileInfoResponseString = fileInfoResponse.body().asString(); msg(fileInfoResponseString); - assertNull(JsonPath.from(fileInfoResponseString).getString("data.dataFile.persistentId")); + org.junit.Assert.assertEquals("The file was NOT supposed to be issued a PID", "", JsonPath.from(fileInfoResponseString).getString("data.dataFile.persistentId")); } } From ae8b4388af34dd170050426b602a83d0104f41c1 Mon Sep 17 00:00:00 2001 From: Leonid Andreev Date: Wed, 24 May 2023 14:47:57 -0400 Subject: [PATCH 0239/1525] A better test for the persistent id. #8889 --- src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java b/src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java index 0b9ae8a77d4..52a6b28b324 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java @@ -13,6 +13,7 @@ import static edu.harvard.iq.dataverse.api.AccessIT.apiToken; import edu.harvard.iq.dataverse.settings.SettingsServiceBean; import edu.harvard.iq.dataverse.util.BundleUtil; +import edu.harvard.iq.dataverse.util.StringUtil; import edu.harvard.iq.dataverse.util.SystemConfig; import java.io.File; import java.io.IOException; @@ -2019,6 +2020,8 @@ public void testDeleteFile() { .statusCode(OK.getStatusCode()); } + // The following specifically tests file-level PIDs configuration in + // individual collections (#8889/#9614) @Test public void testFilePIDsBehavior() { // Create user @@ -2057,7 +2060,8 @@ public void testFilePIDsBehavior() { String fileInfoResponseString = fileInfoResponse.body().asString(); msg(fileInfoResponseString); - assertNotNull(JsonPath.from(fileInfoResponseString).getString("data.dataFile.persistentId")); + String origFilePersistentId = JsonPath.from(fileInfoResponseString).getString("data.dataFile.persistentId"); + assertTrue("The file did not get a persistent identifier assigned (check that file PIDs are enabled instance-wide!)", StringUtil.nonEmpty(origFilePersistentId)); // Now change the file PIDs registration configuration for the collection: From 505e8f236c903f9955917dda576bcae4e269426f Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Wed, 24 May 2023 16:59:32 -0400 Subject: [PATCH 0240/1525] switch to mime type check #9601 --- .../dataverse/ingest/IngestServiceBean.java | 52 +++---------------- .../harvard/iq/dataverse/util/FileUtil.java | 4 ++ 2 files changed, 11 insertions(+), 45 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/ingest/IngestServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/ingest/IngestServiceBean.java index 9d3e7fb1161..560843a7e71 100644 --- a/src/main/java/edu/harvard/iq/dataverse/ingest/IngestServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/ingest/IngestServiceBean.java @@ -1185,52 +1185,14 @@ public boolean fileMetadataExtractable(DataFile dataFile) { // Inspired by fileMetadataExtractable, above public boolean fileMetadataExtractableFromNetcdf(DataFile dataFile, Path tempLocationPath) { 
logger.fine("fileMetadataExtractableFromNetcdf dataFileIn: " + dataFile + ". tempLocationPath: " + tempLocationPath); - boolean extractable = false; - String dataFileLocation = null; - if (tempLocationPath != null) { - // This file was just uploaded and hasn't been saved to S3 or local storage. - dataFileLocation = tempLocationPath.toString(); - } else { - // This file is already on S3 or local storage. - File tempFile = null; - File localFile; - StorageIO storageIO; - try { - storageIO = dataFile.getStorageIO(); - storageIO.open(); - if (storageIO.isLocalFile()) { - localFile = storageIO.getFileSystemPath().toFile(); - dataFileLocation = localFile.getAbsolutePath(); - logger.info("fileMetadataExtractable2: file is local. Path: " + dataFileLocation); - } else { - // Need to create a temporary local file: - tempFile = File.createTempFile("tempFileExtractMetadataNcml", ".tmp"); - try ( ReadableByteChannel targetFileChannel = (ReadableByteChannel) storageIO.getReadChannel(); FileChannel tempFileChannel = new FileOutputStream(tempFile).getChannel();) { - tempFileChannel.transferFrom(targetFileChannel, 0, storageIO.getSize()); - } - dataFileLocation = tempFile.getAbsolutePath(); - logger.info("fileMetadataExtractable2: file is on S3. Downloaded and saved to temp path: " + dataFileLocation); - } - } catch (IOException ex) { - logger.info("fileMetadataExtractable2, could not use storageIO for data file id " + dataFile.getId() + ". Exception: " + ex); - } - } - if (dataFileLocation != null) { - try ( NetcdfFile netcdfFile = NetcdfFiles.open(dataFileLocation)) { - logger.info("fileMetadataExtractable2: trying to open " + dataFileLocation); - if (netcdfFile != null) { - logger.info("fileMetadataExtractable2: returning true"); - extractable = true; - } else { - logger.info("NetcdfFiles.open() could not open file id " + dataFile.getId() + " (null returned)."); - } - } catch (IOException ex) { - logger.info("NetcdfFiles.open() could not open file id " + dataFile.getId() + ". Exception caught: " + ex); - } - } else { - logger.info("dataFileLocation is null for file id " + dataFile.getId() + ". Can't extract NcML."); + logger.info("fileMetadataExtractableFromNetcdf dataFileIn: " + dataFile + ". tempLocationPath: " + tempLocationPath + ". 
contentType: " + dataFile.getContentType()); + if (dataFile.getContentType() != null + && (dataFile.getContentType().equals(FileUtil.MIME_TYPE_NETCDF) + || dataFile.getContentType().equals(FileUtil.MIME_TYPE_XNETCDF) + || dataFile.getContentType().equals(FileUtil.MIME_TYPE_HDF5))) { + return true; } - return extractable; + return false; } /* diff --git a/src/main/java/edu/harvard/iq/dataverse/util/FileUtil.java b/src/main/java/edu/harvard/iq/dataverse/util/FileUtil.java index 6bb7e1d583b..7137db9ca78 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/FileUtil.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/FileUtil.java @@ -182,6 +182,10 @@ public class FileUtil implements java.io.Serializable { //Todo - this is the same as MIME_TYPE_TSV_ALT public static final String MIME_TYPE_INGESTED_FILE = "text/tab-separated-values"; + public static final String MIME_TYPE_NETCDF = "application/netcdf"; + public static final String MIME_TYPE_XNETCDF = "application/x-netcdf"; + public static final String MIME_TYPE_HDF5 = "application/x-hdf5"; + // File type "thumbnail classes" tags: public static final String FILE_THUMBNAIL_CLASS_AUDIO = "audio"; From 6d06e32208de7cd03f28e8a101aa4fddafda8267 Mon Sep 17 00:00:00 2001 From: Leonid Andreev Date: Wed, 24 May 2023 19:32:44 -0400 Subject: [PATCH 0241/1525] reverting the test check. #8889 --- src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java b/src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java index 52a6b28b324..43dc1381221 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java @@ -2061,7 +2061,7 @@ public void testFilePIDsBehavior() { msg(fileInfoResponseString); String origFilePersistentId = JsonPath.from(fileInfoResponseString).getString("data.dataFile.persistentId"); - assertTrue("The file did not get a persistent identifier assigned (check that file PIDs are enabled instance-wide!)", StringUtil.nonEmpty(origFilePersistentId)); + assertNotNull("The file did not get a persistent identifier assigned (check that file PIDs are enabled instance-wide!)", origFilePersistentId); // Now change the file PIDs registration configuration for the collection: From 18076ede66b95907f2b7996b7c8a834d9b169a94 Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Thu, 25 May 2023 15:52:04 -0400 Subject: [PATCH 0242/1525] add dataverse.netcdf.geo-extract-s3-direct-upload config #9601 By default, keep S3 direct upload fast. Don't download NetCDF or HDF5 files to try to pull geospatial metadata out of them when S3 direct upload is configured. If you really want this, add this setting and make it true. 
--- doc/release-notes/9331-extract-bounding-box.md | 4 ++++ doc/sphinx-guides/source/developers/big-data-support.rst | 1 + doc/sphinx-guides/source/installation/config.rst | 8 ++++++++ doc/sphinx-guides/source/user/dataset-management.rst | 1 + .../harvard/iq/dataverse/ingest/IngestServiceBean.java | 7 +++++++ .../edu/harvard/iq/dataverse/settings/JvmSettings.java | 4 ++++ 6 files changed, 25 insertions(+) diff --git a/doc/release-notes/9331-extract-bounding-box.md b/doc/release-notes/9331-extract-bounding-box.md index c4ff83e40c0..dfd2c4cadb7 100644 --- a/doc/release-notes/9331-extract-bounding-box.md +++ b/doc/release-notes/9331-extract-bounding-box.md @@ -1 +1,5 @@ An attempt will be made to extract a geospatial bounding box (west, south, east, north) from NetCDF and HDF5 files and then insert these values into the geospatial metadata block, if enabled. + +The following JVM setting has been added: + +- dataverse.netcdf.geo-extract-s3-direct-upload diff --git a/doc/sphinx-guides/source/developers/big-data-support.rst b/doc/sphinx-guides/source/developers/big-data-support.rst index 4d409b407f7..b238a7623eb 100644 --- a/doc/sphinx-guides/source/developers/big-data-support.rst +++ b/doc/sphinx-guides/source/developers/big-data-support.rst @@ -46,6 +46,7 @@ The following features are disabled when S3 direct upload is enabled. - Unzipping of zip files. (See :ref:`compressed-files`.) - Extraction of metadata from FITS files. (See :ref:`fits`.) - Creation of NcML auxiliary files (See :ref:`netcdf-and-hdf5`.) +- Extraction of a geospatial bounding box from NetCDF and HDF5 files (see :ref:`netcdf-and-hdf5`) unless :ref:`dataverse.netcdf.geo-extract-s3-direct-upload` is set to true. .. _cors-s3-bucket: diff --git a/doc/sphinx-guides/source/installation/config.rst b/doc/sphinx-guides/source/installation/config.rst index f8aef8c59da..b2f9d6b5150 100644 --- a/doc/sphinx-guides/source/installation/config.rst +++ b/doc/sphinx-guides/source/installation/config.rst @@ -2419,6 +2419,14 @@ Defaults to ``false``. Can also be set via any `supported MicroProfile Config API source`_, e.g. the environment variable ``DATAVERSE_UI_SHOW_VALIDITY_FILTER``. Will accept ``[tT][rR][uU][eE]|1|[oO][nN]`` as "true" expressions. +.. _dataverse.netcdf.geo-extract-s3-direct-upload: + +dataverse.netcdf.geo-extract-s3-direct-upload ++++++++++++++++++++++++++++++++++++++++++++++ + +This setting was added to keep S3 direct upload lightweight. When that feature is enabled and you still want NetCDF and HDF5 files to go through metadata extraction of a Geospatial Bounding Box (see :ref:`netcdf-and-hdf5`), which requires the file to be downloaded from S3 in this scenario, make this setting true. + +See also :ref:`s3-direct-upload-features-disabled`. .. _feature-flags: diff --git a/doc/sphinx-guides/source/user/dataset-management.rst b/doc/sphinx-guides/source/user/dataset-management.rst index bf6e118cc21..a4f6251f11f 100755 --- a/doc/sphinx-guides/source/user/dataset-management.rst +++ b/doc/sphinx-guides/source/user/dataset-management.rst @@ -373,6 +373,7 @@ Please note the following rules regarding these fields: - If West Longitude and East Longitude are both over 180 (outside the expected -180:180 range), 360 will be subtracted to shift the values from the 0:360 range to the expected -180:180 range. 
- If either West Longitude or East Longitude are less than zero but the other longitude is greater than 180 (which would imply an indeterminate domain, a lack of clarity of if the domain is -180:180 or 0:360), metadata will be not be extracted. - If the bounding box was successfully populated, the subsequent removal of the NetCDF or HDF5 file from the dataset does not automatically remove the bounding box from the dataset metadata. You must remove the bounding box manually, if desired. +- This feature is disabled if S3 direct upload is enabled (see :ref:`s3-direct-upload-features-disabled`) unless :ref:`dataverse.netcdf.geo-extract-s3-direct-upload` has been set to true. If the bounding box was successfully populated, :ref:`geospatial-search` should be able to find it. diff --git a/src/main/java/edu/harvard/iq/dataverse/ingest/IngestServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/ingest/IngestServiceBean.java index 560843a7e71..c9e2cb3115d 100644 --- a/src/main/java/edu/harvard/iq/dataverse/ingest/IngestServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/ingest/IngestServiceBean.java @@ -69,6 +69,7 @@ import edu.harvard.iq.dataverse.ingest.tabulardata.impl.plugins.sav.SAVFileReaderSpi; import edu.harvard.iq.dataverse.ingest.tabulardata.impl.plugins.por.PORFileReader; import edu.harvard.iq.dataverse.ingest.tabulardata.impl.plugins.por.PORFileReaderSpi; +import edu.harvard.iq.dataverse.settings.JvmSettings; import edu.harvard.iq.dataverse.util.*; import org.apache.commons.io.IOUtils; @@ -105,6 +106,7 @@ import java.util.ListIterator; import java.util.logging.Logger; import java.util.Hashtable; +import java.util.Optional; import javax.ejb.EJB; import javax.ejb.Stateless; import javax.inject.Named; @@ -1280,6 +1282,11 @@ public boolean extractMetadataFromNetcdf(String tempFileLocation, DataFile dataF dataFileLocation = localFile.getAbsolutePath(); logger.info("extractMetadataFromNetcdf: file is local. 
Path: " + dataFileLocation); } else { + Optional allow = JvmSettings.GEO_EXTRACT_S3_DIRECT_UPLOAD.lookupOptional(Boolean.class); + if (!(allow.isPresent() && allow.get())) { + logger.info("extractMetadataFromNetcdf: skipping because of config is set to not slow down S3 remote upload."); + return false; + } // Need to create a temporary local file: tempFile = File.createTempFile("tempFileExtractMetadataNetcdf", ".tmp"); try ( ReadableByteChannel targetFileChannel = (ReadableByteChannel) storageIO.getReadChannel(); FileChannel tempFileChannel = new FileOutputStream(tempFile).getChannel();) { diff --git a/src/main/java/edu/harvard/iq/dataverse/settings/JvmSettings.java b/src/main/java/edu/harvard/iq/dataverse/settings/JvmSettings.java index 1f2f84bc256..62da27671b9 100644 --- a/src/main/java/edu/harvard/iq/dataverse/settings/JvmSettings.java +++ b/src/main/java/edu/harvard/iq/dataverse/settings/JvmSettings.java @@ -118,6 +118,10 @@ public enum JvmSettings { SCOPE_UI(PREFIX, "ui"), UI_ALLOW_REVIEW_INCOMPLETE(SCOPE_UI, "allow-review-for-incomplete"), UI_SHOW_VALIDITY_FILTER(SCOPE_UI, "show-validity-filter"), + + // NetCDF SETTINGS + SCOPE_NETCDF(PREFIX, "netcdf"), + GEO_EXTRACT_S3_DIRECT_UPLOAD(SCOPE_NETCDF, "geo-extract-s3-direct-upload"), ; private static final String SCOPE_SEPARATOR = "."; From 5a0bf2441addb9b8f8de7ba96b1af973d810420a Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Thu, 25 May 2023 16:01:14 -0400 Subject: [PATCH 0243/1525] turn down logging #9601 --- .../dataverse/ingest/IngestServiceBean.java | 28 ++++++++----------- .../netcdf/NetcdfFileMetadataExtractor.java | 4 +-- 2 files changed, 14 insertions(+), 18 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/ingest/IngestServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/ingest/IngestServiceBean.java index c9e2cb3115d..fc68454aba9 100644 --- a/src/main/java/edu/harvard/iq/dataverse/ingest/IngestServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/ingest/IngestServiceBean.java @@ -1186,8 +1186,7 @@ public boolean fileMetadataExtractable(DataFile dataFile) { // Inspired by fileMetadataExtractable, above public boolean fileMetadataExtractableFromNetcdf(DataFile dataFile, Path tempLocationPath) { - logger.fine("fileMetadataExtractableFromNetcdf dataFileIn: " + dataFile + ". tempLocationPath: " + tempLocationPath); - logger.info("fileMetadataExtractableFromNetcdf dataFileIn: " + dataFile + ". tempLocationPath: " + tempLocationPath + ". contentType: " + dataFile.getContentType()); + logger.fine("fileMetadataExtractableFromNetcdf dataFileIn: " + dataFile + ". tempLocationPath: " + tempLocationPath + ". contentType: " + dataFile.getContentType()); if (dataFile.getContentType() != null && (dataFile.getContentType().equals(FileUtil.MIME_TYPE_NETCDF) || dataFile.getContentType().equals(FileUtil.MIME_TYPE_XNETCDF) @@ -1267,10 +1266,10 @@ public boolean extractMetadataFromNetcdf(String tempFileLocation, DataFile dataF String dataFileLocation = null; if (tempFileLocation != null) { - logger.info("tempFileLocation is non null. Setting dataFileLocation to " + tempFileLocation); + logger.fine("tempFileLocation is non null. Setting dataFileLocation to " + tempFileLocation); dataFileLocation = tempFileLocation; } else { - logger.info("tempFileLocation is null. Perhaps the file is alrady on disk or S3 direct upload is enabled."); + logger.fine("tempFileLocation is null. 
Perhaps the file is alrady on disk or S3 direct upload is enabled."); File tempFile = null; File localFile; StorageIO storageIO; @@ -1280,11 +1279,11 @@ public boolean extractMetadataFromNetcdf(String tempFileLocation, DataFile dataF if (storageIO.isLocalFile()) { localFile = storageIO.getFileSystemPath().toFile(); dataFileLocation = localFile.getAbsolutePath(); - logger.info("extractMetadataFromNetcdf: file is local. Path: " + dataFileLocation); + logger.fine("extractMetadataFromNetcdf: file is local. Path: " + dataFileLocation); } else { Optional allow = JvmSettings.GEO_EXTRACT_S3_DIRECT_UPLOAD.lookupOptional(Boolean.class); if (!(allow.isPresent() && allow.get())) { - logger.info("extractMetadataFromNetcdf: skipping because of config is set to not slow down S3 remote upload."); + logger.fine("extractMetadataFromNetcdf: skipping because of config is set to not slow down S3 remote upload."); return false; } // Need to create a temporary local file: @@ -1293,7 +1292,7 @@ public boolean extractMetadataFromNetcdf(String tempFileLocation, DataFile dataF tempFileChannel.transferFrom(targetFileChannel, 0, storageIO.getSize()); } dataFileLocation = tempFile.getAbsolutePath(); - logger.info("extractMetadataFromNetcdf: file is on S3. Downloaded and saved to temp path: " + dataFileLocation); + logger.fine("extractMetadataFromNetcdf: file is on S3. Downloaded and saved to temp path: " + dataFileLocation); } } catch (IOException ex) { logger.info("extractMetadataFromNetcdf, could not use storageIO for data file id " + dataFile.getId() + ". Exception: " + ex); @@ -1309,7 +1308,7 @@ public boolean extractMetadataFromNetcdf(String tempFileLocation, DataFile dataF // Locate metadata extraction plugin for the file format by looking // it up with the Ingest Service Provider Registry: NetcdfFileMetadataExtractor extractorPlugin = new NetcdfFileMetadataExtractor(); - logger.info("creating file from " + dataFileLocation); + logger.fine("creating file from " + dataFileLocation); File file = new File(dataFileLocation); FileMetadataIngest extractedMetadata = extractorPlugin.ingestFile(file); Map> extractedMetadataMap = extractedMetadata.getMetadataMap(); @@ -1347,11 +1346,11 @@ public boolean extractMetadataNcml(DataFile dataFile, Path tempLocationPath) { InputStream inputStream = null; String dataFileLocation = null; if (tempLocationPath != null) { - logger.info("extractMetadataNcml: tempLocationPath is non null. Setting dataFileLocation to " + tempLocationPath); + logger.fine("extractMetadataNcml: tempLocationPath is non null. Setting dataFileLocation to " + tempLocationPath); // This file was just uploaded and hasn't been saved to S3 or local storage. dataFileLocation = tempLocationPath.toString(); } else { - logger.info("extractMetadataNcml: tempLocationPath null. Calling getExistingFile for dataFileLocation."); + logger.fine("extractMetadataNcml: tempLocationPath null. Calling getExistingFile for dataFileLocation."); dataFileLocation = getExistingFile(dataFile, dataFileLocation); } if (dataFileLocation != null) { @@ -1423,8 +1422,7 @@ private String getExistingFile(DataFile dataFile, String dataFileLocation) { if (storageIO.isLocalFile()) { localFile = storageIO.getFileSystemPath().toFile(); dataFileLocation = localFile.getAbsolutePath(); - logger.fine("extractMetadataNcml: file is local. Path: " + dataFileLocation); - logger.info("getExistingFile: file is local. Path: " + dataFileLocation); + logger.fine("getExistingFile: file is local. 
Path: " + dataFileLocation); } else { // Need to create a temporary local file: tempFile = File.createTempFile("tempFileExtractMetadataNcml", ".tmp"); @@ -1432,12 +1430,10 @@ private String getExistingFile(DataFile dataFile, String dataFileLocation) { tempFileChannel.transferFrom(targetFileChannel, 0, storageIO.getSize()); } dataFileLocation = tempFile.getAbsolutePath(); - logger.fine("extractMetadataNcml: file is on S3. Downloaded and saved to temp path: " + dataFileLocation); - logger.info("getExistingFile: file is on S3. Downloaded and saved to temp path: " + dataFileLocation); + logger.fine("getExistingFile: file is on S3. Downloaded and saved to temp path: " + dataFileLocation); } } catch (IOException ex) { - logger.info("While attempting to extract NcML, could not use storageIO for data file id " + dataFile.getId() + ". Exception: " + ex); - logger.info("getExistingFile: While attempting to extract NcML, could not use storageIO for data file id " + dataFile.getId() + ". Exception: " + ex); + logger.fine("getExistingFile: While attempting to extract NcML, could not use storageIO for data file id " + dataFile.getId() + ". Exception: " + ex); } return dataFileLocation; } diff --git a/src/main/java/edu/harvard/iq/dataverse/ingest/metadataextraction/impl/plugins/netcdf/NetcdfFileMetadataExtractor.java b/src/main/java/edu/harvard/iq/dataverse/ingest/metadataextraction/impl/plugins/netcdf/NetcdfFileMetadataExtractor.java index 66f0c25f3d7..9221a6ca679 100644 --- a/src/main/java/edu/harvard/iq/dataverse/ingest/metadataextraction/impl/plugins/netcdf/NetcdfFileMetadataExtractor.java +++ b/src/main/java/edu/harvard/iq/dataverse/ingest/metadataextraction/impl/plugins/netcdf/NetcdfFileMetadataExtractor.java @@ -56,7 +56,7 @@ public FileMetadataIngest ingestFile(File file) throws IOException { String northLatitudeFinal = geoFields.get(NORTH_LATITUDE); String southLatitudeFinal = geoFields.get(SOUTH_LATITUDE); - logger.info(getLineStringsUrl(westLongitudeFinal, southLatitudeFinal, eastLongitudeFinal, northLatitudeFinal)); + logger.fine(getLineStringsUrl(westLongitudeFinal, southLatitudeFinal, eastLongitudeFinal, northLatitudeFinal)); Map> metadataMap = new HashMap<>(); metadataMap.put(WEST_LONGITUDE, new HashSet<>()); @@ -102,7 +102,7 @@ private Map parseGeospatial(NetcdfFile netcdfFile) { geoFields.put(DatasetFieldConstant.northLatitude, getValue(northLatitude)); geoFields.put(DatasetFieldConstant.southLatitude, getValue(southLatitude)); - logger.info(getLineStringsUrl( + logger.fine(getLineStringsUrl( geoFields.get(DatasetFieldConstant.westLongitude), geoFields.get(DatasetFieldConstant.southLatitude), geoFields.get(DatasetFieldConstant.eastLongitude), From a556b69ec0b2ffd4d1b02724fa9dd75296da5699 Mon Sep 17 00:00:00 2001 From: Leonid Andreev Date: Thu, 25 May 2023 16:10:48 -0400 Subject: [PATCH 0244/1525] renaming the flyway script 5.13.0.2, hopefully this will make it into v5.14. 
#8889 --- ...ollections.sql => V5.13.0.2__8889-filepids-in-collections.sql} | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename src/main/resources/db/migration/{V5.14.0.1__8889-filepids-in-collections.sql => V5.13.0.2__8889-filepids-in-collections.sql} (100%) diff --git a/src/main/resources/db/migration/V5.14.0.1__8889-filepids-in-collections.sql b/src/main/resources/db/migration/V5.13.0.2__8889-filepids-in-collections.sql similarity index 100% rename from src/main/resources/db/migration/V5.14.0.1__8889-filepids-in-collections.sql rename to src/main/resources/db/migration/V5.13.0.2__8889-filepids-in-collections.sql From 8b2937e80e53528106d12ea7e8ed7204c7b7ee2c Mon Sep 17 00:00:00 2001 From: qqmyers Date: Thu, 25 May 2023 17:35:23 -0400 Subject: [PATCH 0245/1525] Add call to populate file lists --- src/main/java/edu/harvard/iq/dataverse/DatasetPage.java | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java b/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java index b8d2507bc6b..c857d00a27d 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java @@ -370,6 +370,7 @@ public void setShowIngestSuccess(boolean showIngestSuccess) { public void setTermsGuestbookPopupAction(String popupAction){ if(popupAction != null && popupAction.length() > 0){ + logger.info("TGPA set to " + popupAction); this.termsGuestbookPopupAction = popupAction; } @@ -5178,6 +5179,9 @@ public boolean isFileAccessRequestMultiButtonEnabled(){ if (!isSessionUserAuthenticated() || !dataset.isFileAccessRequest()){ return false; } + //populate file lists + filterSelectedFiles(); + if( this.selectedRestrictedFiles == null || this.selectedRestrictedFiles.isEmpty() ){ return false; } From 7ddc0466a767b292e8dfe6625feea770b112ded5 Mon Sep 17 00:00:00 2001 From: qqmyers Date: Thu, 25 May 2023 17:35:42 -0400 Subject: [PATCH 0246/1525] hide accept terms buttons in download case --- src/main/webapp/guestbook-terms-popup-fragment.xhtml | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/src/main/webapp/guestbook-terms-popup-fragment.xhtml b/src/main/webapp/guestbook-terms-popup-fragment.xhtml index 5616fa48280..bdaa6d92432 100644 --- a/src/main/webapp/guestbook-terms-popup-fragment.xhtml +++ b/src/main/webapp/guestbook-terms-popup-fragment.xhtml @@ -41,7 +41,7 @@
    -
    +
    @@ -51,6 +51,7 @@
    + Date: Thu, 25 May 2023 17:49:17 -0400 Subject: [PATCH 0247/1525] handle changes from #6919 --- src/main/webapp/guestbook-terms-popup-fragment.xhtml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/main/webapp/guestbook-terms-popup-fragment.xhtml b/src/main/webapp/guestbook-terms-popup-fragment.xhtml index bdaa6d92432..9c3391ef9ae 100644 --- a/src/main/webapp/guestbook-terms-popup-fragment.xhtml +++ b/src/main/webapp/guestbook-terms-popup-fragment.xhtml @@ -60,7 +60,7 @@ From 90186edc47772a2aa39089dc1607caecf3c3917a Mon Sep 17 00:00:00 2001 From: qqmyers Date: Thu, 25 May 2023 19:47:11 -0400 Subject: [PATCH 0248/1525] fix query --- .../edu/harvard/iq/dataverse/DataFileServiceBean.java | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/DataFileServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DataFileServiceBean.java index 449e8d351c6..f4a33e4f12f 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DataFileServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/DataFileServiceBean.java @@ -861,10 +861,11 @@ private List retrieveFileAccessRequesters(DataFile fileIn) { // List requesters = em.createNativeQuery("select authenticated_user_id // from fileaccessrequests where datafile_id = // "+fileIn.getId()).getResultList(); - List requesters = em.createNativeQuery("select authenticated_user_id from fileaccessrequests where datafile_id = " + fileIn.getId() + " and request_state='CREATED'").getResultList(); - - for (Object userIdObj : requesters) { - Long userId = (Long) userIdObj; + TypedQuery typedQuery = em.createQuery("select f.user.id from FileAccessRequest f where f.dataFile.id = :file_id and f.requestState= :requestState", Long.class); + typedQuery.setParameter("file_id", fileIn.getId()); + typedQuery.setParameter("requestState", FileAccessRequest.RequestState.CREATED); + List requesters = typedQuery.getResultList(); + for (Long userId : requesters) { AuthenticatedUser user = userService.find(userId); if (user != null) { retList.add(user); From 9d9d74ea4c98021ca4c5baa277f6a2227440c141 Mon Sep 17 00:00:00 2001 From: GPortas Date: Fri, 26 May 2023 10:56:38 +0100 Subject: [PATCH 0249/1525] Added: license icon URI to dataset version payload when it exists --- .../iq/dataverse/util/json/JsonPrinter.java | 16 ++++++++++++---- 1 file changed, 12 insertions(+), 4 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java index 700a54d5e13..68622e7af01 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java @@ -375,10 +375,7 @@ public static JsonObjectBuilder json(DatasetVersion dsv, List anonymized .add("createTime", format(dsv.getCreateTime())); License license = DatasetUtil.getLicense(dsv); if (license != null) { - // Standard license - bld.add("license", jsonObjectBuilder() - .add("name", DatasetUtil.getLicenseName(dsv)) - .add("uri", DatasetUtil.getLicenseURI(dsv))); + bld.add("license", jsonLicense(dsv)); } else { // Custom terms bld.add("termsOfUse", dsv.getTermsOfUseAndAccess().getTermsOfUse()) @@ -1027,4 +1024,15 @@ public static JsonObjectBuilder jsonLinkset(Dataset ds) { .add("publicationDate", ds.getPublicationDateFormattedYYYYMMDD()) .add("storageIdentifier", ds.getStorageIdentifier()); } + + private static JsonObjectBuilder jsonLicense(DatasetVersion dsv) { + JsonObjectBuilder 
licenseJsonObjectBuilder = jsonObjectBuilder() + .add("name", DatasetUtil.getLicenseName(dsv)) + .add("uri", DatasetUtil.getLicenseURI(dsv)); + String licenseIconUri = DatasetUtil.getLicenseIcon(dsv); + if (licenseIconUri != null) { + licenseJsonObjectBuilder.add("iconUri", licenseIconUri); + } + return licenseJsonObjectBuilder; + } }

From e4d29b4f873d7fe8ef16188196ebdb40bda099a1 Mon Sep 17 00:00:00 2001 From: Eryk Kulikowski Date: Fri, 26 May 2023 14:51:09 +0200 Subject: [PATCH 0250/1525] added comment --- src/main/java/edu/harvard/iq/dataverse/Dataset.java | 2 ++ 1 file changed, 2 insertions(+)

diff --git a/src/main/java/edu/harvard/iq/dataverse/Dataset.java b/src/main/java/edu/harvard/iq/dataverse/Dataset.java index d5a30ed84e3..f9c839a0fff 100644 --- a/src/main/java/edu/harvard/iq/dataverse/Dataset.java +++ b/src/main/java/edu/harvard/iq/dataverse/Dataset.java @@ -43,6 +43,8 @@ * @author skraffmiller */ @NamedQueries({ + // Dataset.findById should only be used if you're going to iterate over files (otherwise, lazy loading in DatasetService.find() is better). + // If you are going to iterate over files, preferably call the DatasetService.findDeep() method instead of using this query directly. @NamedQuery(name = "Dataset.findById", query = "SELECT o FROM Dataset o LEFT JOIN FETCH o.files WHERE o.id=:id"), @NamedQuery(name = "Dataset.findIdStale",

From 628704c4ae70e7dbec26672852c5a34bbb8c11dd Mon Sep 17 00:00:00 2001 From: Eryk Kulikowski Date: Fri, 26 May 2023 15:35:35 +0200 Subject: [PATCH 0251/1525] added comment --- .../edu/harvard/iq/dataverse/DatasetServiceBean.java | 12 +++++++++++- 1 file changed, 11 insertions(+), 1 deletion(-)

diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DatasetServiceBean.java index f9dcaad1373..1afbffac6cb 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetServiceBean.java @@ -106,9 +106,19 @@ public Dataset find(Object pk) { return em.find(Dataset.class, pk); } + /** + * Retrieve a dataset with its deep underlying structure in one query execution. + * This is the more efficient choice when you are going to access the files of a dataset. + * In contrast, the find() method does not pre-fetch the file objects and results in point queries when accessing these objects. + * Since the files have a deep structure, many queries can be prevented by using the findDeep() method, especially for large datasets + * containing many files, and when iterating over all the files. + * When you are not going to access the file objects, the default find() method is better because of the lazy loading. 
+     * @return a dataset with pre-fetched file objects
+     */
     public Dataset findDeep(Object pk) {
         return (Dataset) em.createNamedQuery("Dataset.findById")
             .setParameter("id", pk)
+            // Optimization hints: retrieve all data in one query; this prevents point queries when iterating over the files
            .setHint("eclipselink.left-join-fetch", "o.files.ingestRequest")
            .setHint("eclipselink.left-join-fetch", "o.files.thumbnailForDataset")
            .setHint("eclipselink.left-join-fetch", "o.files.dataTables")
@@ -125,7 +135,7 @@ public Dataset findDeep(Object pk) {
            .setHint("eclipselink.left-join-fetch", "o.files.creator")
            .setHint("eclipselink.left-join-fetch", "o.files.alternativePersistentIndentifiers")
            .setHint("eclipselink.left-join-fetch", "o.files.roleAssignments")
-           .setLockMode(LockModeType.NONE)
+           .setLockMode(LockModeType.NONE) // Explicit default ostrich locking (default for em.find() and named query executions)
            .getSingleResult();
     }

From 7771e9b27ef2369867ee3e1faf5068f80d0c06cb Mon Sep 17 00:00:00 2001
From: Eryk Kulikowski
Date: Fri, 26 May 2023 15:36:06 +0200
Subject: [PATCH 0252/1525] remove explicit lock mode setting

---
 src/main/java/edu/harvard/iq/dataverse/DatasetServiceBean.java | 1 -
 1 file changed, 1 deletion(-)

diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DatasetServiceBean.java
index 1afbffac6cb..1794f112d8d 100644
--- a/src/main/java/edu/harvard/iq/dataverse/DatasetServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/DatasetServiceBean.java
@@ -135,7 +135,6 @@ public Dataset findDeep(Object pk) {
            .setHint("eclipselink.left-join-fetch", "o.files.creator")
            .setHint("eclipselink.left-join-fetch", "o.files.alternativePersistentIndentifiers")
            .setHint("eclipselink.left-join-fetch", "o.files.roleAssignments")
-           .setLockMode(LockModeType.NONE) // Explicit default ostrich locking (default for em.find() and named query executions)
            .getSingleResult();
     }

From 53fcfe513086ed82a305f16ca6bd60bb0dd0bb2b Mon Sep 17 00:00:00 2001
From: Eryk Kulikowski
Date: Fri, 26 May 2023 15:57:12 +0200
Subject: [PATCH 0253/1525] experimental query method clean up

---
 .../iq/dataverse/DataFileServiceBean.java     | 366 ------------------
 .../edu/harvard/iq/dataverse/DatasetPage.java |  10 -
 2 files changed, 376 deletions(-)

diff --git a/src/main/java/edu/harvard/iq/dataverse/DataFileServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DataFileServiceBean.java
index 196f84b6877..ab4f61902c6 100644
--- a/src/main/java/edu/harvard/iq/dataverse/DataFileServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/DataFileServiceBean.java
@@ -1,7 +1,5 @@
 package edu.harvard.iq.dataverse;
 
-import edu.harvard.iq.dataverse.authorization.AccessRequest;
-import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser;
 import edu.harvard.iq.dataverse.dataaccess.DataAccess;
 import edu.harvard.iq.dataverse.dataaccess.ImageThumbConverter;
 import edu.harvard.iq.dataverse.dataaccess.StorageIO;
@@ -11,19 +9,15 @@
 import edu.harvard.iq.dataverse.settings.SettingsServiceBean;
 import edu.harvard.iq.dataverse.util.FileSortFieldAndOrder;
 import edu.harvard.iq.dataverse.util.FileUtil;
-import edu.harvard.iq.dataverse.util.SystemConfig;
 import java.io.IOException;
 import java.sql.Timestamp;
 import java.util.ArrayList;
 import java.util.Collections;
 import java.util.Date;
 import java.util.HashMap;
-import java.util.HashSet;
 import java.util.Iterator;
-import java.util.LinkedList;
 import java.util.List;
 import java.util.Map;
-import java.util.Set;
 import java.util.UUID;
 import java.util.logging.Level;
 import java.util.logging.Logger;
@@ -36,9 +30,7 @@
 import javax.persistence.NoResultException;
 import javax.persistence.PersistenceContext;
 import javax.persistence.Query;
-import javax.persistence.StoredProcedureQuery;
 import javax.persistence.TypedQuery;
-import org.apache.commons.lang3.RandomStringUtils;
 
 /**
  *
@@ -561,364 +553,6 @@ public DataFile findCheapAndEasy(Long id) {
         return dataFile;
     }
 
-    /*
-     * This is an experimental method for populating the versions of
-     * the datafile with the filemetadatas, optimized for making as few db
-     * queries as possible.
-     * It should only be used to retrieve filemetadata for the DatasetPage!
-     * It is not guaranteed to adequately perform anywhere else.
-    */
-
-    public void findFileMetadataOptimizedExperimental(Dataset owner, DatasetVersion version, AuthenticatedUser au) {
-        List dataFiles = new ArrayList<>();
-        List dataTables = new ArrayList<>();
-        //List retList = new ArrayList<>();
-
-        // TODO:
-        //     replace these maps with simple lists and run binary search on them. -- 4.2.1
-
-        Map userMap = new HashMap<>();
-        Map filesMap = new HashMap<>();
-        Map datatableMap = new HashMap<>();
-        Map categoryMap = new HashMap<>();
-        Map> fileTagMap = new HashMap<>();
-        List accessRequestFileIds = new ArrayList();
-
-        List fileTagLabels = DataFileTag.listTags();
-
-
-        int i = 0;
-        //Cache responses
-        Map embargoMap = new HashMap();
-
-        List dataTableResults = em.createNativeQuery("SELECT t0.ID, t0.DATAFILE_ID, t0.UNF, t0.CASEQUANTITY, t0.VARQUANTITY, t0.ORIGINALFILEFORMAT, t0.ORIGINALFILESIZE, t0.ORIGINALFILENAME FROM dataTable t0, dataFile t1, dvObject t2 WHERE ((t0.DATAFILE_ID = t1.ID) AND (t1.ID = t2.ID) AND (t2.OWNER_ID = " + owner.getId() + ")) ORDER BY t0.ID").getResultList();
-
-        for (Object[] result : dataTableResults) {
-            DataTable dataTable = new DataTable();
-            long fileId = ((Number) result[1]).longValue();
-
-            dataTable.setId(((Number) result[1]).longValue());
-
-            dataTable.setUnf((String)result[2]);
-
-            dataTable.setCaseQuantity((Long)result[3]);
-
-            dataTable.setVarQuantity((Long)result[4]);
-
-            dataTable.setOriginalFileFormat((String)result[5]);
-
-            dataTable.setOriginalFileSize((Long)result[6]);
-
-            dataTable.setOriginalFileName((String)result[7]);
-
-            dataTables.add(dataTable);
-            datatableMap.put(fileId, i++);
-
-        }
-
-        logger.fine("Retrieved "+dataTables.size()+" DataTable objects.");
-
-        List dataTagsResults = em.createNativeQuery("SELECT t0.DATAFILE_ID, t0.TYPE FROM DataFileTag t0, dvObject t1 WHERE (t1.ID = t0.DATAFILE_ID) AND (t1.OWNER_ID="+ owner.getId() + ")").getResultList();
-        for (Object[] result : dataTagsResults) {
-            Long datafile_id = (Long) result[0];
-            Integer tagtype_id = (Integer) result[1];
-            if (fileTagMap.get(datafile_id) == null) {
-                fileTagMap.put(datafile_id, new HashSet<>());
-            }
-            fileTagMap.get(datafile_id).add(tagtype_id);
-        }
-        logger.fine("Retrieved "+dataTagsResults.size()+" data tags.");
-        dataTagsResults = null;
-
-        //Only need to check for access requests if there is an authenticated user
-        if (au != null) {
-            List accessRequests = em.createNativeQuery("SELECT t0.ID FROM DVOBJECT t0, FILEACCESSREQUESTS t1 WHERE t1.datafile_id = t0.id and t0.OWNER_ID = " + owner.getId() + " and t1.AUTHENTICATED_USER_ID = " + au.getId() + " ORDER BY t0.ID").getResultList();
-            for (Object result : accessRequests) {
-                accessRequestFileIds.add(Long.valueOf((Integer)result));
-            }
-            logger.fine("Retrieved " + accessRequests.size() + " access requests.");
-            accessRequests = null;
-        }
-
-        i = 0;
-
-        List fileResults = em.createNativeQuery("SELECT t0.ID, t0.CREATEDATE, t0.INDEXTIME, t0.MODIFICATIONTIME, t0.PERMISSIONINDEXTIME, t0.PERMISSIONMODIFICATIONTIME, t0.PUBLICATIONDATE, t0.CREATOR_ID, t0.RELEASEUSER_ID, t1.CONTENTTYPE, t0.STORAGEIDENTIFIER, t1.FILESIZE, t1.INGESTSTATUS, t1.CHECKSUMVALUE, t1.RESTRICTED, t1.CHECKSUMTYPE, t1.PREVIOUSDATAFILEID, t1.ROOTDATAFILEID, t0.PROTOCOL, t0.AUTHORITY, t0.IDENTIFIER, t1.EMBARGO_ID FROM DVOBJECT t0, DATAFILE t1 WHERE ((t0.OWNER_ID = " + owner.getId() + ") AND ((t1.ID = t0.ID) AND (t0.DTYPE = 'DataFile'))) ORDER BY t0.ID").getResultList();
-
-        for (Object[] result : fileResults) {
-            Integer file_id = (Integer) result[0];
-
-            DataFile dataFile = new DataFile();
-            dataFile.setMergeable(false);
-
-            dataFile.setId(file_id.longValue());
-
-            Timestamp createDate = (Timestamp) result[1];
-            Timestamp indexTime = (Timestamp) result[2];
-            Timestamp modificationTime = (Timestamp) result[3];
-            Timestamp permissionIndexTime = (Timestamp) result[4];
-            Timestamp permissionModificationTime = (Timestamp) result[5];
-            Timestamp publicationDate = (Timestamp) result[6];
-
-            dataFile.setCreateDate(createDate);
-            dataFile.setIndexTime(indexTime);
-            dataFile.setModificationTime(modificationTime);
-            dataFile.setPermissionIndexTime(permissionIndexTime);
-            dataFile.setPermissionModificationTime(permissionModificationTime);
-            dataFile.setPublicationDate(publicationDate);
-
-            Long creatorId = (Long) result[7];
-            if (creatorId != null) {
-                AuthenticatedUser creator = userMap.get(creatorId);
-                if (creator == null) {
-                    creator = userService.find(creatorId);
-                    if (creator != null) {
-                        userMap.put(creatorId, creator);
-                    }
-                }
-                if (creator != null) {
-                    dataFile.setCreator(creator);
-                }
-            }
-
-            dataFile.setOwner(owner);
-
-            Long releaseUserId = (Long) result[8];
-            if (releaseUserId != null) {
-                AuthenticatedUser releaseUser = userMap.get(releaseUserId);
-                if (releaseUser == null) {
-                    releaseUser = userService.find(releaseUserId);
-                    if (releaseUser != null) {
-                        userMap.put(releaseUserId, releaseUser);
-                    }
-                }
-                if (releaseUser != null) {
-                    dataFile.setReleaseUser(releaseUser);
-                }
-            }
-
-            String contentType = (String) result[9];
-
-            if (contentType != null) {
-                dataFile.setContentType(contentType);
-            }
-
-            String storageIdentifier = (String) result[10];
-
-            if (storageIdentifier != null) {
-                dataFile.setStorageIdentifier(storageIdentifier);
-            }
-
-            Long fileSize = (Long) result[11];
-
-            if (fileSize != null) {
-                dataFile.setFilesize(fileSize);
-            }
-
-            if (result[12] != null) {
-                String ingestStatusString = (String) result[12];
-                dataFile.setIngestStatus(ingestStatusString.charAt(0));
-            }
-
-            String md5 = (String) result[13];
-
-            if (md5 != null) {
-                dataFile.setChecksumValue(md5);
-            }
-
-            Boolean restricted = (Boolean) result[14];
-            if (restricted != null) {
-                dataFile.setRestricted(restricted);
-            }
-
-            String checksumType = (String) result[15];
-            if (checksumType != null) {
-                try {
-                    // In the database we store "SHA1" rather than "SHA-1".
-                    DataFile.ChecksumType typeFromStringInDatabase = DataFile.ChecksumType.valueOf(checksumType);
-                    dataFile.setChecksumType(typeFromStringInDatabase);
-                } catch (IllegalArgumentException ex) {
-                    logger.info("Exception trying to convert " + checksumType + " to enum: " + ex);
-                }
-            }
-
-            Long previousDataFileId = (Long) result[16];
-            if (previousDataFileId != null) {
-                dataFile.setPreviousDataFileId(previousDataFileId);
-            }
-
-            Long rootDataFileId = (Long) result[17];
-            if (rootDataFileId != null) {
-                dataFile.setRootDataFileId(rootDataFileId);
-            }
-
-            String protocol = (String) result[18];
-            if (protocol != null) {
-                dataFile.setProtocol(protocol);
-            }
-
-            String authority = (String) result[19];
-            if (authority != null) {
-                dataFile.setAuthority(authority);
-            }
-
-            String identifier = (String) result[20];
-            if (identifier != null) {
-                dataFile.setIdentifier(identifier);
-            }
-
-            Long embargo_id = (Long) result[21];
-            if (embargo_id != null) {
-                if (embargoMap.containsKey(embargo_id)) {
-                    dataFile.setEmbargo(embargoMap.get(embargo_id));
-                } else {
-                    Embargo e = embargoService.findByEmbargoId(embargo_id);
-                    dataFile.setEmbargo(e);
-                    embargoMap.put(embargo_id, e);
-                }
-            }
-
-            // TODO:
-            // - if ingest status is "bad", look up the ingest report;
-            // - is it a dedicated thumbnail for the dataset? (do we ever need that info?? - not on the dataset page, I don't think...)
-
-            // Is this a tabular file?
-
-            if (datatableMap.get(dataFile.getId()) != null) {
-                dataTables.get(datatableMap.get(dataFile.getId())).setDataFile(dataFile);
-                dataFile.setDataTable(dataTables.get(datatableMap.get(dataFile.getId())));
-
-            }
-
-            if (fileTagMap.get(dataFile.getId()) != null) {
-                for (Integer tag_id : fileTagMap.get(dataFile.getId())) {
-                    DataFileTag tag = new DataFileTag();
-                    tag.setTypeByLabel(fileTagLabels.get(tag_id));
-                    tag.setDataFile(dataFile);
-                    dataFile.addTag(tag);
-                }
-            }
-
-            if (dataFile.isRestricted() && accessRequestFileIds.contains(dataFile.getId())) {
-                dataFile.addFileAccessRequester(au);
-            }
-
-            dataFiles.add(dataFile);
-            filesMap.put(dataFile.getId(), i++);
-        }
-
-        logger.fine("Retrieved and cached "+i+" datafiles.");
-
-        i = 0;
-        for (DataFileCategory fileCategory : owner.getCategories()) {
-            //logger.fine("category: id="+fileCategory.getId());
-            categoryMap.put(fileCategory.getId(), i++);
-        }
-
-        logger.fine("Retrieved "+i+" file categories attached to the dataset.");
-
-        version.setFileMetadatas(retrieveFileMetadataForVersion(owner, version, dataFiles, filesMap, categoryMap));
-        logger.fine("Retrieved " + version.getFileMetadatas().size() + " filemetadatas for the version " + version.getId());
-        owner.setFiles(dataFiles);
-    }
-
-    private List retrieveFileMetadataForVersion(Dataset dataset, DatasetVersion version, List dataFiles, Map filesMap, Map categoryMap) {
-        List retList = new ArrayList<>();
-        Map> categoryMetaMap = new HashMap<>();
-
-        List categoryResults = em.createNativeQuery("select t0.filecategories_id, t0.filemetadatas_id from filemetadata_datafilecategory t0, filemetadata t1 where (t0.filemetadatas_id = t1.id) AND (t1.datasetversion_id = "+version.getId()+")").getResultList();
-        int i = 0;
-        for (Object[] result : categoryResults) {
-            Long category_id = (Long) result[0];
-            Long filemeta_id = (Long) result[1];
-            if (categoryMetaMap.get(filemeta_id) == null) {
-                categoryMetaMap.put(filemeta_id, new HashSet<>());
-            }
-            categoryMetaMap.get(filemeta_id).add(category_id);
-            i++;
-        }
-        logger.fine("Retrieved and mapped "+i+" file categories attached to files in the version "+version.getId());
-
-        List metadataResults = em.createNativeQuery("select id, datafile_id, DESCRIPTION, LABEL, RESTRICTED, DIRECTORYLABEL, prov_freeform from FileMetadata where datasetversion_id = "+version.getId() + " ORDER BY LABEL").getResultList();
-
-        for (Object[] result : metadataResults) {
-            Integer filemeta_id = (Integer) result[0];
-
-            if (filemeta_id == null) {
-                continue;
-            }
-
-            Long file_id = (Long) result[1];
-            if (file_id == null) {
-                continue;
-            }
-
-            Integer file_list_id = filesMap.get(file_id);
-            if (file_list_id == null) {
-                continue;
-            }
-            FileMetadata fileMetadata = new FileMetadata();
-            fileMetadata.setId(filemeta_id.longValue());
-            fileMetadata.setCategories(new LinkedList<>());
-
-            if (categoryMetaMap.get(fileMetadata.getId()) != null) {
-                for (Long cat_id : categoryMetaMap.get(fileMetadata.getId())) {
-                    if (categoryMap.get(cat_id) != null) {
-                        fileMetadata.getCategories().add(dataset.getCategories().get(categoryMap.get(cat_id)));
-                    }
-                }
-            }
-
-            fileMetadata.setDatasetVersion(version);
-
-            // Link the FileMetadata object to the DataFile:
-            fileMetadata.setDataFile(dataFiles.get(file_list_id));
-            // ... and the DataFile back to the FileMetadata:
-            fileMetadata.getDataFile().getFileMetadatas().add(fileMetadata);
-
-            String description = (String) result[2];
-
-            if (description != null) {
-                fileMetadata.setDescription(description);
-            }
-
-            String label = (String) result[3];
-
-            if (label != null) {
-                fileMetadata.setLabel(label);
-            }
-
-            Boolean restricted = (Boolean) result[4];
-            if (restricted != null) {
-                fileMetadata.setRestricted(restricted);
-            }
-
-            String dirLabel = (String) result[5];
-            if (dirLabel != null){
-                fileMetadata.setDirectoryLabel(dirLabel);
-            }
-
-            String provFreeForm = (String) result[6];
-            if (provFreeForm != null){
-                fileMetadata.setProvFreeForm(provFreeForm);
-            }
-
-            retList.add(fileMetadata);
-        }
-
-        logger.fine("Retrieved "+retList.size()+" file metadatas for version "+version.getId()+" (inside the retrieveFileMetadataForVersion method).");
-
-
-        /*
-            We no longer perform this sort here, just to keep this filemetadata
-            list as identical as possible to when it's produced by the "traditional"
-            EJB method. When it's necessary to have the filemetadatas sorted by
-            FileMetadata.compareByLabel, the DatasetVersion.getFileMetadatasSorted()
-            method should be called.
-
-        Collections.sort(retList, FileMetadata.compareByLabel); */
-
-        return retList;
-    }
 
     public List findIngestsInProgress() {
         if ( em.isOpen() ) {
diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java b/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java
index 1e4c56c6241..d4fd586b6bf 100644
--- a/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java
+++ b/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java
@@ -1977,11 +1977,6 @@ private String init(boolean initFull) {
             // init the list of FileMetadatas
             if (workingVersion.isDraft() && canUpdateDataset()) {
                 readOnly = false;
-            } else {
-                // an attempt to retreive both the filemetadatas and datafiles early on, so that
-                // we don't have to do so later (possibly, many more times than necessary):
-                //AuthenticatedUser au = session.getUser() instanceof AuthenticatedUser ? (AuthenticatedUser) session.getUser() : null;
-                //datafileService.findFileMetadataOptimizedExperimental(dataset, workingVersion, au);
             }
             // This will default to all the files in the version, if the search term
             // parameter hasn't been specified yet:
@@ -2880,11 +2875,6 @@ public String refresh() {
             this.dataset = this.workingVersion.getDataset();
         }
 
-        if (readOnly) {
-            //AuthenticatedUser au = session.getUser() instanceof AuthenticatedUser ? (AuthenticatedUser) session.getUser() : null;
-            //datafileService.findFileMetadataOptimizedExperimental(dataset, workingVersion, au);
-        }
-
        fileMetadatasSearch = selectFileMetadatasForDisplay();
 
        displayCitation = dataset.getCitation(true, workingVersion);

From cf6f317d8b829ce98d8da988ed13774af53e3921 Mon Sep 17 00:00:00 2001
From: Eryk Kulikowski
Date: Fri, 26 May 2023 15:58:34 +0200
Subject: [PATCH 0254/1525] removed comments

---
 .../engine/command/impl/AbstractCreateDatasetCommand.java | 3 ---
 1 file changed, 3 deletions(-)

diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/AbstractCreateDatasetCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/AbstractCreateDatasetCommand.java
index 300c9b6c1b1..eb171160376 100644
--- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/AbstractCreateDatasetCommand.java
+++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/AbstractCreateDatasetCommand.java
@@ -139,9 +139,6 @@ public Dataset execute(CommandContext ctxt) throws CommandException {
         //Use for code that requires database ids
         postDBFlush(theDataset, ctxt);
 
-        // TODO: this needs to be moved in to an onSuccess method; not adding to this PR as its out of scope
-        // TODO: switch to asynchronous version when JPA sync works
-        // ctxt.index().asyncIndexDataset(theDataset.getId(), true);
         ctxt.index().asyncIndexDataset(theDataset, true);
 
         return theDataset;

From 25d804e8ec316007661a1ba4e0f0c338d1f2a2c0 Mon Sep 17 00:00:00 2001
From: Eryk Kulikowski
Date: Fri, 26 May 2023 16:19:01 +0200
Subject: [PATCH 0255/1525] async indexing in the new dataset version command

---
 .../engine/command/impl/CreateDatasetVersionCommand.java | 7 +++----
 1 file changed, 3 insertions(+), 4 deletions(-)

diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateDatasetVersionCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateDatasetVersionCommand.java
index 3493b145be2..538500cfe0a 100644
--- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateDatasetVersionCommand.java
+++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateDatasetVersionCommand.java
@@ -65,10 +65,9 @@ public DatasetVersion execute(CommandContext ctxt) throws CommandException {
         //good wrapped response if the TOA/Request Access not in compliance
         prepareDatasetAndVersion();
 
-        // TODO make async
-        // ctxt.index().asyncIndexDataset(dataset);
-        return ctxt.datasets().storeVersion(newVersion);
-
+        DatasetVersion version = ctxt.datasets().storeVersion(newVersion);
+        ctxt.index().asyncIndexDataset(dataset, true);
+        return version;
     }
 
     /**

From 9f9ad74e2e32d1575ad8f54d1aa37d05cffbacc2 Mon Sep 17 00:00:00 2001
From: Eryk Kulikowski
Date: Fri, 26 May 2023 16:36:14 +0200
Subject: [PATCH 0256/1525] extended comment

---
 .../edu/harvard/iq/dataverse/ingest/IngestMessageBean.java | 5 +++++
 1 file changed, 5 insertions(+)

diff --git a/src/main/java/edu/harvard/iq/dataverse/ingest/IngestMessageBean.java b/src/main/java/edu/harvard/iq/dataverse/ingest/IngestMessageBean.java
index 6e83a6584df..8712fbf3ffc 100644
--- a/src/main/java/edu/harvard/iq/dataverse/ingest/IngestMessageBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/ingest/IngestMessageBean.java
@@ -75,6 +75,11 @@ public void onMessage(Message message) {
             ingestMessage = (IngestMessage) om.getObject();
 
             // if the lock was removed while an ingest was queued, ratake the lock
+            The "if" is the first thing that addDatasetLock method does.
+            It has some complexity and would result in the code duplication if repeated here.
+            If that check would be removed from the addDatasetLock method in the future without
+            updating the code using this method, ingest code would still not break because
+            we remove "all" ingest locks at the end (right now, there can be at most one ingest lock).
             datasetService.addDatasetLock(ingestMessage.getDatasetId(),
                     DatasetLock.Reason.Ingest,
                     ingestMessage.getAuthenticatedUserId(),

From 433df840141339652370ea362796e7afe73c0d6b Mon Sep 17 00:00:00 2001
From: Eryk Kulikowski
Date: Fri, 26 May 2023 16:56:28 +0200
Subject: [PATCH 0257/1525] fixed nullpointer in unit test

---
 .../engine/command/impl/CreateDatasetVersionCommand.java | 4 +++-
 1 file changed, 3 insertions(+), 1 deletion(-)

diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateDatasetVersionCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateDatasetVersionCommand.java
index 538500cfe0a..1d83f522f29 100644
--- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateDatasetVersionCommand.java
+++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateDatasetVersionCommand.java
@@ -66,7 +66,9 @@ public DatasetVersion execute(CommandContext ctxt) throws CommandException {
         prepareDatasetAndVersion();
 
         DatasetVersion version = ctxt.datasets().storeVersion(newVersion);
-        ctxt.index().asyncIndexDataset(dataset, true);
+        if (ctxt.index() != null) {
+            ctxt.index().asyncIndexDataset(dataset, true);
+        }
         return version;
     }
 

From 2d6564aba1c44381ffd141c86c9de4ba82b5781d Mon Sep 17 00:00:00 2001
From: Philip Durbin
Date: Fri, 26 May 2023 11:10:49 -0400
Subject: [PATCH 0258/1525] set expectations about NcML files (modern only) #9153 #9601

---
 doc/sphinx-guides/source/user/dataset-management.rst | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/doc/sphinx-guides/source/user/dataset-management.rst b/doc/sphinx-guides/source/user/dataset-management.rst
index a4f6251f11f..f530e825a19 100755
--- a/doc/sphinx-guides/source/user/dataset-management.rst
+++ b/doc/sphinx-guides/source/user/dataset-management.rst
@@ -353,6 +353,8 @@ NcML
 
 For NetCDF and HDF5 files, an attempt will be made to extract metadata in NcML_ (XML) format and save it as an auxiliary file. (See also :doc:`/developers/aux-file-support` in the Developer Guide.) A previewer for these NcML files is available (see :ref:`file-previews`).
 
+Please note that only modern versions of these formats, the ones based on HDF5 such as NetCDF 4+ and HDF5 itself (rather than HDF4), will yield an NcML auxiliary file.
+
 .. _NcML: https://docs.unidata.ucar.edu/netcdf-java/current/userguide/ncml_overview.html
 
 Geospatial Bounding Box

From c198730f1a029e4a4f404de99b2805c21c69c909 Mon Sep 17 00:00:00 2001
From: GPortas
Date: Fri, 26 May 2023 16:55:08 +0100
Subject: [PATCH 0259/1525] Added: getDatasetVersionCitation endpoint

---
 .../harvard/iq/dataverse/api/Datasets.java   | 16 +++++++++++++++
 .../harvard/iq/dataverse/api/DatasetsIT.java | 20 +++++++++++++++++++
 .../edu/harvard/iq/dataverse/api/UtilIT.java |  8 ++++++++
 3 files changed, 44 insertions(+)

diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java
index a7546c2a3fd..522a3adc899 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java
@@ -3893,4 +3893,20 @@ public Response getPrivateUrlDatasetVersion(@PathParam("privateUrlToken") String
         }
         return ok(responseJson);
     }
+
+    @GET
+    @AuthRequired
+    @Path("{id}/versions/{versionId}/citation")
+    public Response getDatasetVersionCitation(@Context ContainerRequestContext crc,
+                                              @PathParam("id") String datasetId,
+                                              @PathParam("versionId") String versionId,
+                                              @QueryParam("anonymizedAccess") boolean anonymizedAccess,
+                                              @Context UriInfo uriInfo,
+                                              @Context HttpHeaders headers) {
+        return response( req -> {
+            DatasetVersion dsv = getDatasetVersionOrDie(req, versionId, findDatasetOrDie(datasetId), uriInfo, headers);
+            return (dsv == null || dsv.getId() == null) ? notFound("Dataset version not found")
+                    : ok(dsv.getCitation(true, anonymizedAccess));
+        }, getRequestUser(crc));
+    }
 }
diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java
index 89c80034321..5a55e8fb048 100644
--- a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java
+++ b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java
@@ -3130,4 +3130,24 @@ public void getPrivateUrlDatasetVersion() {
         getPrivateUrlDatasetVersionAnonymizedResponse = UtilIT.getPrivateUrlDatasetVersion("invalidToken", null);
         getPrivateUrlDatasetVersionAnonymizedResponse.then().assertThat().statusCode(NOT_FOUND.getStatusCode());
     }
+
+    @Test
+    public void getDatasetVersionCitation() {
+        Response createUser = UtilIT.createRandomUser();
+        String apiToken = UtilIT.getApiTokenFromResponse(createUser);
+
+        Response createDataverseResponse = UtilIT.createRandomDataverse(apiToken);
+        String dataverseAlias = UtilIT.getAliasFromResponse(createDataverseResponse);
+
+        Response createDatasetResponse = UtilIT.createRandomDatasetViaNativeApi(dataverseAlias, apiToken);
+        Integer datasetId = JsonPath.from(createDatasetResponse.body().asString()).getInt("data.id");
+
+        Response getDatasetVersionCitationResponse = UtilIT.getDatasetVersionCitation(datasetId, ":draft", apiToken);
+        getDatasetVersionCitationResponse.prettyPrint();
+
+        getDatasetVersionCitationResponse.then().assertThat()
+                // We check that the returned message contains information expected for the citation string
+                .body("data.message", containsString("DRAFT VERSION"))
+                .statusCode(OK.getStatusCode());
+    }
 }
diff --git a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java
index ceb2a386f92..d055e460cb5 100644
--- a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java
+++ b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java
@@ -3206,4 +3206,12 @@ static Response getPrivateUrlDatasetVersion(String privateUrlToken, String anony
                 .get("/api/datasets/privateUrlDatasetVersion/" + privateUrlToken);
         return response;
     }
+
+    static Response getDatasetVersionCitation(Integer datasetId, String version, String apiToken) {
+        Response response = given()
+                .header(API_TOKEN_HTTP_HEADER, apiToken)
+                .contentType("application/json")
+                .get("/api/datasets/" + datasetId + "/versions/" + version + "/citation");
+        return response;
+    }
 }

From 7a30c65d678958f44835460822e884897335e92a Mon Sep 17 00:00:00 2001
From: Leonid Andreev
Date: Fri, 26 May 2023 15:57:56 -0400
Subject: [PATCH 0260/1525] Makes the available roles listing show inherited roles and work the same in the permissions UI and the dataverses API. #9338

---
 .../edu/harvard/iq/dataverse/DataverseRoleServiceBean.java | 3 ++-
 .../iq/dataverse/engine/command/impl/ListRolesCommand.java | 3 ++-
 2 files changed, 4 insertions(+), 2 deletions(-)

diff --git a/src/main/java/edu/harvard/iq/dataverse/DataverseRoleServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DataverseRoleServiceBean.java
index 9d09d0580e2..ffc51f09104 100644
--- a/src/main/java/edu/harvard/iq/dataverse/DataverseRoleServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/DataverseRoleServiceBean.java
@@ -303,7 +303,8 @@ public Set availableRoles(Long dvId) {
         Set roles = dv.getRoles();
         roles.addAll(findBuiltinRoles());
 
-        while (!dv.isEffectivelyPermissionRoot()) {
+        //while (!dv.isEffectivelyPermissionRoot()) {
+        while (dv.getOwner() != null) {
             dv = dv.getOwner();
             roles.addAll(dv.getRoles());
         }
diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/ListRolesCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/ListRolesCommand.java
index da9e5adf247..1c8e4003d96 100644
--- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/ListRolesCommand.java
+++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/ListRolesCommand.java
@@ -27,7 +27,8 @@ public ListRolesCommand(DataverseRequest aRequest, Dataverse aDefinitionPoint) {
 
     @Override
     public Set execute(CommandContext ctxt) throws CommandException {
-        return definitionPoint.getRoles();
+        return ctxt.roles().availableRoles(definitionPoint.getId());
+        //return definitionPoint.getRoles();
     }
 

From 47fab3ca06f05936eb4ed83d24c99de36df4ff18 Mon Sep 17 00:00:00 2001
From: GPortas
Date: Sat, 27 May 2023 07:57:20 +0100
Subject: [PATCH 0261/1525] Removed: custom anonymized field value from getPrivateUrlDatasetVersion endpoint

---
 .../harvard/iq/dataverse/api/Datasets.java   |  5 ++--
 .../iq/dataverse/util/json/JsonPrinter.java  | 25 ++++++++-----------
 .../harvard/iq/dataverse/api/DatasetsIT.java | 10 +++-----
 .../edu/harvard/iq/dataverse/api/UtilIT.java |  3 +--
 .../dataverse/util/json/JsonPrinterTest.java | 11 ++++----
 5 files changed, 24 insertions(+), 30 deletions(-)

diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java
index 522a3adc899..2e221682faa 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java
@@ -3869,8 +3869,7 @@ public Response getDatasetSummaryFieldNames() {
 
     @GET
     @Path("privateUrlDatasetVersion/{privateUrlToken}")
-    public Response getPrivateUrlDatasetVersion(@PathParam("privateUrlToken") String privateUrlToken,
-                                                @QueryParam("anonymizedFieldValue") String anonymizedFieldValue) {
+    public Response getPrivateUrlDatasetVersion(@PathParam("privateUrlToken") String privateUrlToken) {
         PrivateUrlUser privateUrlUser = privateUrlService.getPrivateUrlUserFromToken(privateUrlToken);
         if (privateUrlUser == null) {
             return notFound("Private URL user not found");
@@ -3887,7 +3886,7 @@ public Response getPrivateUrlDatasetVersion(@PathParam("privateUrlToken") String
         JsonObjectBuilder responseJson;
         if (isAnonymizedAccess) {
             List anonymizedFieldTypeNamesList = new ArrayList<>(Arrays.asList(anonymizedFieldTypeNames.split(",\\s")));
-            responseJson = json(dsv, anonymizedFieldTypeNamesList, anonymizedFieldValue);
+            responseJson = json(dsv, anonymizedFieldTypeNamesList);
         } else {
             responseJson = json(dsv);
         }
diff --git a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java
index 68622e7af01..968b0243412 100644
--- a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java
+++ b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java
@@ -358,10 +358,10 @@ public static JsonObjectBuilder json(FileDetailsHolder ds) {
     }
 
     public static JsonObjectBuilder json(DatasetVersion dsv) {
-        return json(dsv, null, null);
+        return json(dsv, null);
     }
 
-    public static JsonObjectBuilder json(DatasetVersion dsv, List anonymizedFieldTypeNamesList, String anonymizedFieldValue) {
+    public static JsonObjectBuilder json(DatasetVersion dsv, List anonymizedFieldTypeNamesList) {
         JsonObjectBuilder bld = jsonObjectBuilder()
                 .add("id", dsv.getId()).add("datasetId", dsv.getDataset().getId())
                 .add("datasetPersistentId", dsv.getDataset().getGlobalId().asString())
@@ -397,7 +397,7 @@ public static JsonObjectBuilder json(DatasetVersion dsv, List anonymized
                 .add("fileAccessRequest", dsv.getTermsOfUseAndAccess().isFileAccessRequest());
         bld.add("metadataBlocks", (anonymizedFieldTypeNamesList != null) ?
-                jsonByBlocks(dsv.getDatasetFields(), anonymizedFieldTypeNamesList, anonymizedFieldValue)
+                jsonByBlocks(dsv.getDatasetFields(), anonymizedFieldTypeNamesList)
                 : jsonByBlocks(dsv.getDatasetFields())
         );
         bld.add("files", jsonFileMetadatas(dsv.getFileMetadatas()));
@@ -477,15 +477,15 @@ public static JsonObjectBuilder json(DatasetDistributor dist) {
     }
 
     public static JsonObjectBuilder jsonByBlocks(List fields) {
-        return jsonByBlocks(fields, null, null);
+        return jsonByBlocks(fields, null);
     }
 
-    public static JsonObjectBuilder jsonByBlocks(List fields, List anonymizedFieldTypeNamesList, String anonymizedFieldValue) {
+    public static JsonObjectBuilder jsonByBlocks(List fields, List anonymizedFieldTypeNamesList) {
         JsonObjectBuilder blocksBld = jsonObjectBuilder();
 
         for (Map.Entry> blockAndFields : DatasetField.groupByBlock(fields).entrySet()) {
             MetadataBlock block = blockAndFields.getKey();
-            blocksBld.add(block.getName(), JsonPrinter.json(block, blockAndFields.getValue(), anonymizedFieldTypeNamesList, anonymizedFieldValue));
+            blocksBld.add(block.getName(), JsonPrinter.json(block, blockAndFields.getValue(), anonymizedFieldTypeNamesList));
         }
         return blocksBld;
     }
@@ -499,10 +499,10 @@ public static JsonObjectBuilder jsonByBlocks(List fields, List fields) {
-        return json(block, fields, null, null);
+        return json(block, fields, null);
     }
 
-    public static JsonObjectBuilder json(MetadataBlock block, List fields, List anonymizedFieldTypeNamesList, String anonymizedFieldValue) {
+    public static JsonObjectBuilder json(MetadataBlock block, List fields, List anonymizedFieldTypeNamesList) {
         JsonObjectBuilder blockBld = jsonObjectBuilder();
 
         blockBld.add("displayName", block.getDisplayName());
@@ -510,7 +510,7 @@ public static JsonObjectBuilder json(MetadataBlock block, List fie
         final JsonArrayBuilder fieldsArray = Json.createArrayBuilder();
         Map cvocMap = (datasetFieldService==null) ? new HashMap() :datasetFieldService.getCVocConf(true);
-        DatasetFieldWalker.walk(fields, settingsService, cvocMap, new DatasetFieldsToJson(fieldsArray, anonymizedFieldTypeNamesList, anonymizedFieldValue));
+        DatasetFieldWalker.walk(fields, settingsService, cvocMap, new DatasetFieldsToJson(fieldsArray, anonymizedFieldTypeNamesList));
         blockBld.add("fields", fieldsArray);
 
         return blockBld;
@@ -746,16 +746,13 @@ private static class DatasetFieldsToJson implements DatasetFieldWalker.Listener
         Deque objectStack = new LinkedList<>();
         Deque valueArrStack = new LinkedList<>();
         List anonymizedFieldTypeNamesList = null;
-        String anonymizedFieldValue = null;
-
         DatasetFieldsToJson(JsonArrayBuilder result) {
             valueArrStack.push(result);
         }
 
-        DatasetFieldsToJson(JsonArrayBuilder result, List anonymizedFieldTypeNamesList, String anonymizedFieldValue) {
+        DatasetFieldsToJson(JsonArrayBuilder result, List anonymizedFieldTypeNamesList) {
             this(result);
             this.anonymizedFieldTypeNamesList = anonymizedFieldTypeNamesList;
-            this.anonymizedFieldValue = anonymizedFieldValue;
         }
 
         @Override
@@ -842,7 +839,7 @@ public void endCompoundValue(DatasetFieldCompoundValue dsfcv) {
         private void anonymizeField(JsonObjectBuilder jsonField) {
             jsonField.add("typeClass", "primitive");
-            jsonField.add("value", (anonymizedFieldValue == null) ? BundleUtil.getStringFromBundle("dataset.anonymized.withheld") : anonymizedFieldValue);
+            jsonField.add("value", BundleUtil.getStringFromBundle("dataset.anonymized.withheld"));
             jsonField.add("multiple", false);
         }
     }
diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java
index 5a55e8fb048..ef1ba09b5a4 100644
--- a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java
+++ b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java
@@ -3077,7 +3077,6 @@ public void testGetDatasetSummaryFieldNames() {
         assertFalse(actualSummaryFields.isEmpty());
     }
 
-
     @Test
     public void getPrivateUrlDatasetVersion() {
         Response createUser = UtilIT.createRandomUser();
@@ -3096,7 +3095,7 @@
         String tokenForPrivateUrlUser = JsonPath.from(privateUrlGet.body().asString()).getString("data.token");
 
         // We verify that the response contains the dataset associated to the private URL token
-        Response getPrivateUrlDatasetVersionResponse = UtilIT.getPrivateUrlDatasetVersion(tokenForPrivateUrlUser, null);
+        Response getPrivateUrlDatasetVersionResponse = UtilIT.getPrivateUrlDatasetVersion(tokenForPrivateUrlUser);
         getPrivateUrlDatasetVersionResponse.then().assertThat()
                 .body("data.datasetId", equalTo(datasetId))
                 .statusCode(OK.getStatusCode());
@@ -3113,13 +3112,12 @@
         privateUrlGet = UtilIT.privateUrlGet(datasetId, apiToken);
         tokenForPrivateUrlUser = JsonPath.from(privateUrlGet.body().asString()).getString("data.token");
 
-        String testAnonymizedValue = "testAnonymizedValue";
-        Response getPrivateUrlDatasetVersionAnonymizedResponse = UtilIT.getPrivateUrlDatasetVersion(tokenForPrivateUrlUser, testAnonymizedValue);
+        Response getPrivateUrlDatasetVersionAnonymizedResponse = UtilIT.getPrivateUrlDatasetVersion(tokenForPrivateUrlUser);
 
         // We verify that the response is anonymized for the author field
         getPrivateUrlDatasetVersionAnonymizedResponse.then().assertThat()
                 .body("data.datasetId", equalTo(datasetId))
-                .body("data.metadataBlocks.citation.fields[1].value", equalTo(testAnonymizedValue))
+                .body("data.metadataBlocks.citation.fields[1].value", equalTo(BundleUtil.getStringFromBundle("dataset.anonymized.withheld")))
                 .body("data.metadataBlocks.citation.fields[1].typeClass", equalTo("primitive"))
                 .body("data.metadataBlocks.citation.fields[1].multiple", equalTo(false))
                 .statusCode(OK.getStatusCode());
@@ -3127,7 +3125,7 @@
         UtilIT.deleteSetting(SettingsServiceBean.Key.AnonymizedFieldTypeNames);
 
         // Test invalid token
-        getPrivateUrlDatasetVersionAnonymizedResponse = UtilIT.getPrivateUrlDatasetVersion("invalidToken", null);
+        getPrivateUrlDatasetVersionAnonymizedResponse = UtilIT.getPrivateUrlDatasetVersion("invalidToken");
         getPrivateUrlDatasetVersionAnonymizedResponse.then().assertThat().statusCode(NOT_FOUND.getStatusCode());
     }
 
diff --git a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java
index d055e460cb5..38febce0c4d 100644
--- a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java
+++ b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java
@@ -3199,10 +3199,9 @@ static Response getDatasetSummaryFieldNames() {
         return response;
     }
 
-    static Response getPrivateUrlDatasetVersion(String privateUrlToken, String anonymizedFieldValue) {
+    static Response getPrivateUrlDatasetVersion(String privateUrlToken) {
         Response response = given()
                 .contentType("application/json")
-                .queryParam("anonymizedFieldValue", anonymizedFieldValue)
                 .get("/api/datasets/privateUrlDatasetVersion/" + privateUrlToken);
         return response;
     }
diff --git a/src/test/java/edu/harvard/iq/dataverse/util/json/JsonPrinterTest.java b/src/test/java/edu/harvard/iq/dataverse/util/json/JsonPrinterTest.java
index 741426558ab..8697b5aa354 100644
--- a/src/test/java/edu/harvard/iq/dataverse/util/json/JsonPrinterTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/util/json/JsonPrinterTest.java
@@ -23,6 +23,8 @@
 import javax.json.JsonObject;
 import javax.json.JsonObjectBuilder;
 import javax.json.JsonString;
+
+import edu.harvard.iq.dataverse.util.BundleUtil;
 import org.junit.Test;
 import org.junit.Before;
 import static org.junit.Assert.assertEquals;
@@ -202,7 +204,7 @@ public void testDatasetContactOutOfBoxNoPrivacy() {
         SettingsServiceBean nullServiceBean = null;
         DatasetFieldServiceBean nullDFServiceBean = null;
         JsonPrinter.injectSettingsService(nullServiceBean, nullDFServiceBean);
-        
+
         JsonObject jsonObject = JsonPrinter.json(block, fields).build();
         assertNotNull(jsonObject);
 
@@ -241,7 +243,7 @@ public void testDatasetContactWithPrivacy() {
         vals.add(val);
         datasetContactField.setDatasetFieldCompoundValues(vals);
         fields.add(datasetContactField);
-        
+
         DatasetFieldServiceBean nullDFServiceBean = null;
         JsonPrinter.injectSettingsService(new MockSettingsSvc(), nullDFServiceBean);
 
@@ -340,12 +342,11 @@ public void testMetadataBlockAnonymized() {
         datasetAuthorField.setDatasetFieldCompoundValues(compoundValues);
         fields.add(datasetAuthorField);
 
-        String testAnonymizedFieldValue = "test";
-        JsonObject actualJsonObject = JsonPrinter.json(block, fields, List.of("author"), testAnonymizedFieldValue).build();
+        JsonObject actualJsonObject = JsonPrinter.json(block, fields, List.of("author")).build();
 
         assertNotNull(actualJsonObject);
         JsonObject actualAuthorJsonObject = actualJsonObject.getJsonArray("fields").getJsonObject(0);
-        assertEquals(testAnonymizedFieldValue, actualAuthorJsonObject.getString("value"));
+        assertEquals(BundleUtil.getStringFromBundle("dataset.anonymized.withheld"), actualAuthorJsonObject.getString("value"));
         assertEquals("primitive", actualAuthorJsonObject.getString("typeClass"));
         assertFalse(actualAuthorJsonObject.getBoolean("multiple"));
     }

From 53edae596346173f86f7cd52b9cfafc6ba2c5dcb Mon Sep 17 00:00:00 2001
From: GPortas
Date: Sat, 27 May 2023 08:29:43 +0100
Subject: [PATCH 0262/1525] Added: getPrivateUrlDatasetVersionCitation endpoint

---
 .../harvard/iq/dataverse/api/Datasets.java   | 12 ++++++++++
 .../harvard/iq/dataverse/api/DatasetsIT.java | 22 +++++++++++++++++++
 .../edu/harvard/iq/dataverse/api/UtilIT.java |  7 ++++++
 3 files changed, 41 insertions(+)

diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java
index 2e221682faa..8c9255be898 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java
@@ -3893,6 +3893,18 @@ public Response getPrivateUrlDatasetVersion(@PathParam("privateUrlToken") String
         return ok(responseJson);
     }
 
+    @GET
+    @Path("privateUrlDatasetVersion/{privateUrlToken}/citation")
+    public Response getPrivateUrlDatasetVersionCitation(@PathParam("privateUrlToken") String privateUrlToken) {
+        PrivateUrlUser privateUrlUser = privateUrlService.getPrivateUrlUserFromToken(privateUrlToken);
+        if (privateUrlUser == null) {
+            return notFound("Private URL user not found");
+        }
+        DatasetVersion dsv = privateUrlService.getDraftDatasetVersionFromToken(privateUrlToken);
+        return (dsv == null || dsv.getId() == null) ? notFound("Dataset version not found")
+                : ok(dsv.getCitation(true, privateUrlUser.hasAnonymizedAccess()));
+    }
+
     @GET
     @AuthRequired
     @Path("{id}/versions/{versionId}/citation")
diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java
index ef1ba09b5a4..44a08e48369 100644
--- a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java
+++ b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java
@@ -3129,6 +3129,28 @@ public void getPrivateUrlDatasetVersion() {
         getPrivateUrlDatasetVersionAnonymizedResponse.then().assertThat().statusCode(NOT_FOUND.getStatusCode());
     }
 
+    @Test
+    public void getPrivateUrlDatasetVersionCitation() {
+        Response createUser = UtilIT.createRandomUser();
+        String apiToken = UtilIT.getApiTokenFromResponse(createUser);
+
+        Response createDataverseResponse = UtilIT.createRandomDataverse(apiToken);
+        String dataverseAlias = UtilIT.getAliasFromResponse(createDataverseResponse);
+
+        Response createDatasetResponse = UtilIT.createRandomDatasetViaNativeApi(dataverseAlias, apiToken);
+        int datasetId = JsonPath.from(createDatasetResponse.body().asString()).getInt("data.id");
+
+        UtilIT.privateUrlCreate(datasetId, apiToken, false);
+        Response privateUrlGet = UtilIT.privateUrlGet(datasetId, apiToken);
+        String tokenForPrivateUrlUser = JsonPath.from(privateUrlGet.body().asString()).getString("data.token");
+
+        Response getPrivateUrlDatasetVersionCitation = UtilIT.getPrivateUrlDatasetVersionCitation(tokenForPrivateUrlUser);
+        getPrivateUrlDatasetVersionCitation.then().assertThat()
+                // We check that the returned message contains information expected for the citation string
+                .body("data.message", containsString("DRAFT VERSION"))
+                .statusCode(OK.getStatusCode());
+    }
+
     @Test
     public void getDatasetVersionCitation() {
         Response createUser = UtilIT.createRandomUser();
diff --git a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java
index 38febce0c4d..64c80442fcf 100644
--- a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java
+++ b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java
@@ -3206,6 +3206,13 @@ static Response getPrivateUrlDatasetVersion(String privateUrlToken) {
         return response;
     }
 
+    static Response getPrivateUrlDatasetVersionCitation(String privateUrlToken) {
+        Response response = given()
+                .contentType("application/json")
+                .get("/api/datasets/privateUrlDatasetVersion/" + privateUrlToken + "/citation");
+        return response;
+    }
+
     static Response getDatasetVersionCitation(Integer datasetId, String version, String apiToken) {
         Response response = given()
                 .header(API_TOKEN_HTTP_HEADER, apiToken)

From 6784770e0c2c5df4058017ebb4fe93eb992ebee8 Mon Sep 17 00:00:00 2001
From: GPortas
Date: Sat, 27 May 2023 08:51:35 +0100
Subject: [PATCH 0263/1525] Removed: anonymizedAccess query param from getDatasetVersionCitation endpoint

---
 src/main/java/edu/harvard/iq/dataverse/api/Datasets.java | 3 +--
 1 file changed, 1 insertion(+), 2 deletions(-)

diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java
index 8c9255be898..cfe674535d6 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java
@@ -3911,13 +3911,12 @@ public Response getPrivateUrlDatasetVersionCitation(@PathParam("privateUrlToken"
     public Response getDatasetVersionCitation(@Context ContainerRequestContext crc,
                                               @PathParam("id") String datasetId,
                                               @PathParam("versionId") String versionId,
-                                              @QueryParam("anonymizedAccess") boolean anonymizedAccess,
                                               @Context UriInfo uriInfo,
                                               @Context HttpHeaders headers) {
         return response( req -> {
             DatasetVersion dsv = getDatasetVersionOrDie(req, versionId, findDatasetOrDie(datasetId), uriInfo, headers);
             return (dsv == null || dsv.getId() == null) ? notFound("Dataset version not found")
-                    : ok(dsv.getCitation(true, anonymizedAccess));
+                    : ok(dsv.getCitation(true, false));
         }, getRequestUser(crc));
     }
 }

From f561e392fe146e0e83c6910c9b3796a80591a7cc Mon Sep 17 00:00:00 2001
From: GPortas
Date: Sun, 28 May 2023 14:11:27 +0100
Subject: [PATCH 0264/1525] Fixed: getDatasetVersionCitation endpoint

---
 .../java/edu/harvard/iq/dataverse/api/Datasets.java | 13 +++----------
 1 file changed, 3 insertions(+), 10 deletions(-)

diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java
index cfe674535d6..ea2eea4d028 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java
@@ -3908,15 +3908,8 @@ public Response getPrivateUrlDatasetVersionCitation(@PathParam("privateUrlToken"
     @GET
     @AuthRequired
     @Path("{id}/versions/{versionId}/citation")
-    public Response getDatasetVersionCitation(@Context ContainerRequestContext crc,
-                                              @PathParam("id") String datasetId,
-                                              @PathParam("versionId") String versionId,
-                                              @Context UriInfo uriInfo,
-                                              @Context HttpHeaders headers) {
-        return response( req -> {
-            DatasetVersion dsv = getDatasetVersionOrDie(req, versionId, findDatasetOrDie(datasetId), uriInfo, headers);
-            return (dsv == null || dsv.getId() == null) ? notFound("Dataset version not found")
-                    : ok(dsv.getCitation(true, false));
-        }, getRequestUser(crc));
+    public Response getDatasetVersionCitation(@Context ContainerRequestContext crc, @PathParam("id") String datasetId, @PathParam("versionId") String versionId, @Context UriInfo uriInfo, @Context HttpHeaders headers) {
+        return response(req -> ok(
+            getDatasetVersionOrDie(req, versionId, findDatasetOrDie(datasetId), uriInfo, headers).getCitation(true, false)), getRequestUser(crc));
     }
 }

From 34bc29e37dbcfc0628fde14057ea373e7d0b02da Mon Sep 17 00:00:00 2001
From: Leonid Andreev
Date: Tue, 30 May 2023 14:17:20 -0400
Subject: [PATCH 0265/1525] Removing the commented-out lines, once the changes have been tested. #9338

---
 .../java/edu/harvard/iq/dataverse/DataverseRoleServiceBean.java | 1 -
 .../iq/dataverse/engine/command/impl/ListRolesCommand.java      | 1 -
 2 files changed, 2 deletions(-)

diff --git a/src/main/java/edu/harvard/iq/dataverse/DataverseRoleServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DataverseRoleServiceBean.java
index ffc51f09104..b83593f5b6e 100644
--- a/src/main/java/edu/harvard/iq/dataverse/DataverseRoleServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/DataverseRoleServiceBean.java
@@ -303,7 +303,6 @@ public Set availableRoles(Long dvId) {
         Set roles = dv.getRoles();
         roles.addAll(findBuiltinRoles());
 
-        //while (!dv.isEffectivelyPermissionRoot()) {
         while (dv.getOwner() != null) {
             dv = dv.getOwner();
             roles.addAll(dv.getRoles());
         }
diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/ListRolesCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/ListRolesCommand.java
index 1c8e4003d96..d82b2e7a81d 100644
--- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/ListRolesCommand.java
+++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/ListRolesCommand.java
@@ -28,7 +28,6 @@ public ListRolesCommand(DataverseRequest aRequest, Dataverse aDefinitionPoint) {
     @Override
     public Set execute(CommandContext ctxt) throws CommandException {
         return ctxt.roles().availableRoles(definitionPoint.getId());
-        //return definitionPoint.getRoles();
     }
 

From 6d1acb076d1a9cfe58ac6189a35de7fc3d58dae5 Mon Sep 17 00:00:00 2001
From: Eryk Kulikowski
Date: Wed, 31 May 2023 15:42:56 +0200
Subject: [PATCH 0266/1525] added sleep for reindex in datasets IT

---
 src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java | 1 +
 1 file changed, 1 insertion(+)

diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java
index 6988fc333a3..a07fca411d4 100644
--- a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java
+++ b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java
@@ -2803,6 +2803,7 @@ public void testCuratePublishedDatasetVersionCommand() throws IOException {
 
         UtilIT.publishDatasetViaNativeApi(datasetId, "updatecurrent", apiToken).then().assertThat().statusCode(OK.getStatusCode());
 
+        UtilIT.sleepForReindex(datasetId, apiToken, 5);
         Response getDatasetJsonAfterUpdate = UtilIT.nativeGet(datasetId, apiToken);
         getDatasetJsonAfterUpdate.prettyPrint();
         getDatasetJsonAfterUpdate.then().assertThat()

From 9e08e2a18c6a8defa101f1efeffa57af824fe451 Mon Sep 17 00:00:00 2001
From: Eryk Kulikowski
Date: Wed, 31 May 2023 15:53:50 +0200
Subject: [PATCH 0267/1525] fixed expected string given int

---
 src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java
index a07fca411d4..127074bd6c1 100644
--- a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java
+++ b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java
@@ -2803,7 +2803,7 @@ public void testCuratePublishedDatasetVersionCommand() throws IOException {
 
         UtilIT.publishDatasetViaNativeApi(datasetId, "updatecurrent", apiToken).then().assertThat().statusCode(OK.getStatusCode());
 
-        UtilIT.sleepForReindex(datasetId, apiToken, 5);
+        UtilIT.sleepForReindex(String.valueOf(datasetId), apiToken, 5);
         Response getDatasetJsonAfterUpdate = UtilIT.nativeGet(datasetId, apiToken);
         getDatasetJsonAfterUpdate.prettyPrint();
         getDatasetJsonAfterUpdate.then().assertThat()

From 846ad06780d2113270f0eccd670cf27161694165 Mon Sep 17 00:00:00 2001
From: Eryk Kulikowski
Date: Wed, 31 May 2023 16:12:08 +0200
Subject: [PATCH 0268/1525] fixed compile error

---
 .../java/edu/harvard/iq/dataverse/ingest/IngestMessageBean.java | 1 +
 1 file changed, 1 insertion(+)

diff --git a/src/main/java/edu/harvard/iq/dataverse/ingest/IngestMessageBean.java b/src/main/java/edu/harvard/iq/dataverse/ingest/IngestMessageBean.java
index e76d9c96fb1..77ec6701bc6 100644
--- a/src/main/java/edu/harvard/iq/dataverse/ingest/IngestMessageBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/ingest/IngestMessageBean.java
@@ -23,6 +23,7 @@
 import edu.harvard.iq.dataverse.*;
 import edu.harvard.iq.dataverse.authorization.AuthenticationServiceBean;
 import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser;
+import edu.harvard.iq.dataverse.util.BundleUtil;
 
 import java.sql.Timestamp;
 import java.time.Instant;

From d2325fa3915bc4884155b1a51fb0d4dbabed981a Mon Sep 17 00:00:00 2001
From: landreev
Date: Wed, 31 May 2023 10:40:00 -0400
Subject: [PATCH 0269/1525] Update doc/sphinx-guides/source/installation/config.rst

Co-authored-by: Philip Durbin
---
 doc/sphinx-guides/source/installation/config.rst | 1 -
 1 file changed, 1 deletion(-)

diff --git a/doc/sphinx-guides/source/installation/config.rst b/doc/sphinx-guides/source/installation/config.rst
index 32707cba8ab..742ac18cf0d 100644
--- a/doc/sphinx-guides/source/installation/config.rst
+++ b/doc/sphinx-guides/source/installation/config.rst
@@ -2762,7 +2762,6 @@ If you don't want to register file-based PIDs for your installation, set:
 
 ``curl -X PUT -d 'false' http://localhost:8080/api/admin/settings/:FilePIDsEnabled``
 
-Note: File-level PID registration was added in Dataverse Software 4.9; it could not be disabled until Dataverse Software 4.9.3.
 
 It is possible to override the installation-wide setting for specific collections. For example, registration of PIDs for files can be enabled in a specific collection when it is disabled instance-wide. Or it can be disabled in specific collections where it is enabled by default. See :ref:`collection-attributes-api` for details.
 

From 0704ae26ef3467e52b14e2c7735a0aeee2d3d8fd Mon Sep 17 00:00:00 2001
From: landreev
Date: Wed, 31 May 2023 10:40:18 -0400
Subject: [PATCH 0270/1525] Update doc/sphinx-guides/source/api/native-api.rst

Co-authored-by: Philip Durbin
---
 doc/sphinx-guides/source/api/native-api.rst | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/doc/sphinx-guides/source/api/native-api.rst b/doc/sphinx-guides/source/api/native-api.rst
index 182e2c8dc0b..c702fac0489 100644
--- a/doc/sphinx-guides/source/api/native-api.rst
+++ b/doc/sphinx-guides/source/api/native-api.rst
@@ -751,7 +751,7 @@ The following attributes are supported:
 * ``name`` Name
 * ``description`` Description
 * ``affiliation`` Affiliation
-* ``filePIDsEnabled`` ("true" or "false") Enables or disables registraion of file-level PIDs in datasets within the collection (overriding the instance-wide setting).
+* ``filePIDsEnabled`` ("true" or "false") Enables or disables registration of file-level PIDs in datasets within the collection (overriding the instance-wide setting).
 
 Datasets
 

From e337871696be60ad4277ed6a487bcd085daeb0b6 Mon Sep 17 00:00:00 2001
From: Leonid Andreev
Date: Wed, 31 May 2023 16:02:45 -0400
Subject: [PATCH 0271/1525] A simple test for the new collection attributes api; added comments in the Dataverse class. #8889

---
 .../edu/harvard/iq/dataverse/Dataverse.java | 17 ++++++++
 .../iq/dataverse/api/DataversesIT.java      | 43 +++++++++++++++++++
 2 files changed, 60 insertions(+)

diff --git a/src/main/java/edu/harvard/iq/dataverse/Dataverse.java b/src/main/java/edu/harvard/iq/dataverse/Dataverse.java
index 6498f942cdb..50d5ae09548 100644
--- a/src/main/java/edu/harvard/iq/dataverse/Dataverse.java
+++ b/src/main/java/edu/harvard/iq/dataverse/Dataverse.java
@@ -590,9 +590,26 @@ public void setCitationDatasetFieldTypes(List citationDatasetF
         this.citationDatasetFieldTypes = citationDatasetFieldTypes;
     }
 
+    /**
+     * @Note: this setting is Nullable, with {@code null} indicating that the
+     * desired behavior is not explicitly configured for this specific collection.
+     * See the comment below.
+     */
     @Column(nullable = true)
     private Boolean filePIDsEnabled;
 
+    /**
+     * Specifies whether the PIDs for Datafiles should be registered when publishing
+     * datasets in this Collection, if the behavior is explicitly configured.
+     * @return {@code Boolean.TRUE} if explicitly enabled, {@code Boolean.FALSE} if explicitly disabled.
+     * {@code null} indicates that the behavior is not explicitly defined, in which
+     * case the behavior should follow the explicit configuration of the first
+     * direct ancestor collection, or the instance-wide configuration, if none
+     * present.
+     * @Note: If present, this configuration therefore by default applies to all
+     * the sub-collections, unless explicitly overwritten there.
From 0704ae26ef3467e52b14e2c7735a0aeee2d3d8fd Mon Sep 17 00:00:00 2001 From: landreev Date: Wed, 31 May 2023 10:40:18 -0400 Subject: [PATCH 0270/1525] Update doc/sphinx-guides/source/api/native-api.rst Co-authored-by: Philip Durbin --- doc/sphinx-guides/source/api/native-api.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/sphinx-guides/source/api/native-api.rst b/doc/sphinx-guides/source/api/native-api.rst index 182e2c8dc0b..c702fac0489 100644 --- a/doc/sphinx-guides/source/api/native-api.rst +++ b/doc/sphinx-guides/source/api/native-api.rst @@ -751,7 +751,7 @@ The following attributes are supported: * ``name`` Name * ``description`` Description * ``affiliation`` Affiliation -* ``filePIDsEnabled`` ("true" or "false") Enables or disables registraion of file-level PIDs in datasets within the collection (overriding the instance-wide setting). +* ``filePIDsEnabled`` ("true" or "false") Enables or disables registration of file-level PIDs in datasets within the collection (overriding the instance-wide setting). Datasets From e337871696be60ad4277ed6a487bcd085daeb0b6 Mon Sep 17 00:00:00 2001 From: Leonid Andreev Date: Wed, 31 May 2023 16:02:45 -0400 Subject: [PATCH 0271/1525] A simple test for the new collection attributes api; added comments in the Dataverse class. #8889 --- .../edu/harvard/iq/dataverse/Dataverse.java | 17 ++++++++ .../iq/dataverse/api/DataversesIT.java | 43 +++++++++++++++++++ 2 files changed, 60 insertions(+) diff --git a/src/main/java/edu/harvard/iq/dataverse/Dataverse.java b/src/main/java/edu/harvard/iq/dataverse/Dataverse.java index 6498f942cdb..50d5ae09548 100644 --- a/src/main/java/edu/harvard/iq/dataverse/Dataverse.java +++ b/src/main/java/edu/harvard/iq/dataverse/Dataverse.java @@ -590,9 +590,26 @@ public void setCitationDatasetFieldTypes(List citationDatasetF this.citationDatasetFieldTypes = citationDatasetFieldTypes; } + /** + * @Note: this setting is Nullable, with {@code null} indicating that the + * desired behavior is not explicitly configured for this specific collection. + * See the comment below. + */ @Column(nullable = true) private Boolean filePIDsEnabled; + /** + * Specifies whether the PIDs for Datafiles should be registered when publishing + * datasets in this Collection, if the behavior is explicitly configured. + * @return {@code Boolean.TRUE} if explicitly enabled, {@code Boolean.FALSE} if explicitly disabled. + * {@code null} indicates that the behavior is not explicitly defined, in which + * case the behavior should follow the explicit configuration of the first + * direct ancestor collection, or the instance-wide configuration, if none + * present. + * @Note: If present, this configuration therefore by default applies to all + * the sub-collections, unless explicitly overwritten there. 
+ * @author landreev + */ public Boolean getFilePIDsEnabled() { return filePIDsEnabled; } diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java b/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java index 819a182872a..4a4095e6c91 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java @@ -579,4 +579,47 @@ public void testImportDDI() throws IOException, InterruptedException { assertEquals(200, deleteUserResponse.getStatusCode()); } + @Test + public void testAttributesApi() throws Exception { + + Response createUser = UtilIT.createRandomUser(); + String apiToken = UtilIT.getApiTokenFromResponse(createUser); + + Response createDataverseResponse = UtilIT.createRandomDataverse(apiToken); + if (createDataverseResponse.getStatusCode() != 201) { + System.out.println("A workspace for testing (a dataverse) couldn't be created in the root dataverse. The output was:\n\n" + createDataverseResponse.body().asString()); + System.out.println("\nPlease ensure that users can created dataverses in the root in order for this test to run."); + } else { + createDataverseResponse.prettyPrint(); + } + assertEquals(201, createDataverseResponse.getStatusCode()); + + String collectionAlias = UtilIT.getAliasFromResponse(createDataverseResponse); + String newCollectionAlias = collectionAlias + "RENAMED"; + + // Change the alias of the collection: + + Response changeAttributeResp = UtilIT.setCollectionAttribute(collectionAlias, "alias", newCollectionAlias, apiToken); + changeAttributeResp.prettyPrint(); + + changeAttributeResp.then().assertThat() + .statusCode(OK.getStatusCode()) + .body("message.message", equalTo("Update successful")); + + // Check on the collection, under the new alias: + + Response collectionInfoResponse = UtilIT.exportDataverse(newCollectionAlias, apiToken); + collectionInfoResponse.prettyPrint(); + + collectionInfoResponse.then().assertThat() + .statusCode(OK.getStatusCode()) + .body("data.alias", equalTo(newCollectionAlias)); + + // Delete the collection (again, using its new alias): + + Response deleteCollectionResponse = UtilIT.deleteDataverse(newCollectionAlias, apiToken); + deleteCollectionResponse.prettyPrint(); + assertEquals(OK.getStatusCode(), deleteCollectionResponse.getStatusCode()); + } + } From 1f13674465c76e97a0bc72e607786d9cbadbe66e Mon Sep 17 00:00:00 2001 From: Eryk Kulikowski Date: Thu, 1 Jun 2023 09:51:19 +0200 Subject: [PATCH 0272/1525] restored lock mode type NON for the findDeep --- src/main/java/edu/harvard/iq/dataverse/DatasetServiceBean.java | 1 + 1 file changed, 1 insertion(+) diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DatasetServiceBean.java index 305afd2ed30..51741b0e686 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetServiceBean.java @@ -135,6 +135,7 @@ public Dataset findDeep(Object pk) { .setHint("eclipselink.left-join-fetch", "o.files.creator") .setHint("eclipselink.left-join-fetch", "o.files.alternativePersistentIndentifiers") .setHint("eclipselink.left-join-fetch", "o.files.roleAssignments") + .setLockMode(LockModeType.NONE) .getSingleResult(); } From 311d115c0144f3e394f6525c7be6ebddd49bc2d2 Mon Sep 17 00:00:00 2001 From: Eryk Kulikowski Date: Thu, 1 Jun 2023 13:16:18 +0200 Subject: [PATCH 0273/1525] reverted last two commits as they do not fix the integration tests --- 
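Note: the sleepForReindex call being reverted below exists because indexing in Dataverse is asynchronous, so integration tests have to poll for the reindex to complete rather than assume the dataset is immediately searchable. A minimal self-contained sketch of that polling pattern — the timing and predicate are illustrative, not UtilIT's actual implementation:

import java.util.function.BooleanSupplier;

class ReindexWait {
    // Poll a condition (e.g. "the dataset shows up in search results")
    // once per second until it holds or the timeout expires.
    static boolean waitFor(BooleanSupplier condition, int maxSeconds) throws InterruptedException {
        for (int i = 0; i < maxSeconds; i++) {
            if (condition.getAsBoolean()) {
                return true;
            }
            Thread.sleep(1000L);
        }
        return condition.getAsBoolean(); // one final check at the deadline
    }
}
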
src/main/java/edu/harvard/iq/dataverse/DatasetServiceBean.java | 1 - src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java | 1 - 2 files changed, 2 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DatasetServiceBean.java index 51741b0e686..305afd2ed30 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetServiceBean.java @@ -135,7 +135,6 @@ public Dataset findDeep(Object pk) { .setHint("eclipselink.left-join-fetch", "o.files.creator") .setHint("eclipselink.left-join-fetch", "o.files.alternativePersistentIndentifiers") .setHint("eclipselink.left-join-fetch", "o.files.roleAssignments") - .setLockMode(LockModeType.NONE) .getSingleResult(); } diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java index 127074bd6c1..6988fc333a3 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java @@ -2803,7 +2803,6 @@ public void testCuratePublishedDatasetVersionCommand() throws IOException { UtilIT.publishDatasetViaNativeApi(datasetId, "updatecurrent", apiToken).then().assertThat().statusCode(OK.getStatusCode()); - UtilIT.sleepForReindex(String.valueOf(datasetId), apiToken, 5); Response getDatasetJsonAfterUpdate = UtilIT.nativeGet(datasetId, apiToken); getDatasetJsonAfterUpdate.prettyPrint(); getDatasetJsonAfterUpdate.then().assertThat() From 3ef99d6512b45d0d0d196b4c3d5c7a468b99d18d Mon Sep 17 00:00:00 2001 From: Eryk Kulikowski Date: Thu, 1 Jun 2023 13:18:13 +0200 Subject: [PATCH 0274/1525] reverted back to TODO in CreateDatasetVersionCommand --- .../engine/command/impl/CreateDatasetVersionCommand.java | 9 ++++----- 1 file changed, 4 insertions(+), 5 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateDatasetVersionCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateDatasetVersionCommand.java index 1d83f522f29..3493b145be2 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateDatasetVersionCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateDatasetVersionCommand.java @@ -65,11 +65,10 @@ public DatasetVersion execute(CommandContext ctxt) throws CommandException { //good wrapped response if the TOA/Request Access not in compliance prepareDatasetAndVersion(); - DatasetVersion version = ctxt.datasets().storeVersion(newVersion); - if (ctxt.index() != null) { - ctxt.index().asyncIndexDataset(dataset, true); - } - return version; + // TODO make async + // ctxt.index().asyncIndexDataset(dataset); + return ctxt.datasets().storeVersion(newVersion); + } /** From 58452dbdc41782054953dbbc45eaf7fb4751205a Mon Sep 17 00:00:00 2001 From: sirineREKIK Date: Thu, 1 Jun 2023 13:22:25 +0200 Subject: [PATCH 0275/1525] change description of Roles --- doc/sphinx-guides/source/api/native-api.rst | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/doc/sphinx-guides/source/api/native-api.rst b/doc/sphinx-guides/source/api/native-api.rst index 2380a5142da..f663c1fb306 100644 --- a/doc/sphinx-guides/source/api/native-api.rst +++ b/doc/sphinx-guides/source/api/native-api.rst @@ -4556,8 +4556,9 @@ A curl example listing objects Parameters: -``key`` is the user token, for this API is must not be passed in the header. 
-``role_id`` User roles, several possible values among:
+``key`` Is the user token, for this API is must not be passed in the header.
+
+``role_id`` Roles are customizable. Standard roles include:

 - ``1`` = Admin
 - ``2`` = File Downloader

From 0869f0f70ad8d27f20a7222dc27866ed3e568f90 Mon Sep 17 00:00:00 2001
From: Eryk Kulikowski
Date: Thu, 1 Jun 2023 14:19:10 +0200
Subject: [PATCH 0276/1525] restored TODO fix in CreateDatasetVersionCommand

---
 .../engine/command/impl/CreateDatasetVersionCommand.java | 9 +++++----
 1 file changed, 5 insertions(+), 4 deletions(-)

diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateDatasetVersionCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateDatasetVersionCommand.java
index 3493b145be2..1d83f522f29 100644
--- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateDatasetVersionCommand.java
+++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateDatasetVersionCommand.java
@@ -65,10 +65,11 @@ public DatasetVersion execute(CommandContext ctxt) throws CommandException {
         //good wrapped response if the TOA/Request Access not in compliance
         prepareDatasetAndVersion();

-        // TODO make async
-        // ctxt.index().asyncIndexDataset(dataset);
-        return ctxt.datasets().storeVersion(newVersion);
-
+        DatasetVersion version = ctxt.datasets().storeVersion(newVersion);
+        if (ctxt.index() != null) {
+            ctxt.index().asyncIndexDataset(dataset, true);
+        }
+        return version;
     }

     /**

From 7082d3c0345da28cb80990a40816c5cb281cd696 Mon Sep 17 00:00:00 2001
From: Eryk Kulikowski
Date: Thu, 1 Jun 2023 15:17:13 +0200
Subject: [PATCH 0277/1525] reverted find deep when dataset was already given

---
 .../java/edu/harvard/iq/dataverse/search/IndexServiceBean.java | 3 +--
 1 file changed, 1 insertion(+), 2 deletions(-)

diff --git a/src/main/java/edu/harvard/iq/dataverse/search/IndexServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/search/IndexServiceBean.java
index 22471cf0105..91e2d21d9f2 100644
--- a/src/main/java/edu/harvard/iq/dataverse/search/IndexServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/search/IndexServiceBean.java
@@ -437,8 +437,7 @@ public void indexDvObject(DvObject objectIn) throws SolrServerException, IOExce
     }

     private void indexDataset(Dataset dataset, boolean doNormalSolrDocCleanUp) throws SolrServerException, IOException {
-        Dataset deep = datasetService.findDeep(dataset.getId());
-        doIndexDataset(deep, doNormalSolrDocCleanUp);
+        doIndexDataset(dataset, doNormalSolrDocCleanUp);
         updateLastIndexedTime(dataset.getId());
     }

From 54a0346bd74eeeff005ccfd3a7869215faee3481 Mon Sep 17 00:00:00 2001
From: Eryk Kulikowski
Date: Thu, 1 Jun 2023 15:37:34 +0200
Subject: [PATCH 0278/1525] restored find deep for faster publishing

---
 .../java/edu/harvard/iq/dataverse/search/IndexServiceBean.java | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/src/main/java/edu/harvard/iq/dataverse/search/IndexServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/search/IndexServiceBean.java
index 91e2d21d9f2..22471cf0105 100644
--- a/src/main/java/edu/harvard/iq/dataverse/search/IndexServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/search/IndexServiceBean.java
@@ -437,7 +437,8 @@ public void indexDvObject(DvObject objectIn) throws SolrServerException, IOExce
     }

     private void indexDataset(Dataset dataset, boolean doNormalSolrDocCleanUp) throws SolrServerException, IOException {
-        doIndexDataset(dataset, doNormalSolrDocCleanUp);
+        Dataset deep = 
datasetService.findDeep(dataset.getId()); + doIndexDataset(deep, doNormalSolrDocCleanUp); updateLastIndexedTime(dataset.getId()); } From 5adf6a4acf574eab8fd393a18c8bf6d0a390cf71 Mon Sep 17 00:00:00 2001 From: Eryk Kulikowski Date: Thu, 1 Jun 2023 16:18:03 +0200 Subject: [PATCH 0279/1525] removed both find deep calls from index bean --- .../harvard/iq/dataverse/search/IndexServiceBean.java | 11 +++-------- 1 file changed, 3 insertions(+), 8 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/search/IndexServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/search/IndexServiceBean.java index 22471cf0105..baa13eba368 100644 --- a/src/main/java/edu/harvard/iq/dataverse/search/IndexServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/search/IndexServiceBean.java @@ -437,8 +437,7 @@ public void indexDvObject(DvObject objectIn) throws SolrServerException, IOExce } private void indexDataset(Dataset dataset, boolean doNormalSolrDocCleanUp) throws SolrServerException, IOException { - Dataset deep = datasetService.findDeep(dataset.getId()); - doIndexDataset(deep, doNormalSolrDocCleanUp); + doIndexDataset(dataset, doNormalSolrDocCleanUp); updateLastIndexedTime(dataset.getId()); } @@ -1695,11 +1694,7 @@ private void updatePathForExistingSolrDocs(DvObject object) throws SolrServerExc sid.addField(fieldName, doc.getFieldValue(fieldName)); } - Dataset dataset = null; - if (object.isInstanceofDataset()) { - dataset = datasetService.findDeep(object.getId()); - } - List paths = object.isInstanceofDataset() ? retrieveDVOPaths(dataset) + List paths = object.isInstanceofDataset() ? retrieveDVOPaths(datasetService.find(object.getId())) : retrieveDVOPaths(dataverseService.find(object.getId())); sid.removeField(SearchFields.SUBTREE); @@ -1707,7 +1702,7 @@ private void updatePathForExistingSolrDocs(DvObject object) throws SolrServerExc UpdateResponse addResponse = solrClientService.getSolrClient().add(sid); UpdateResponse commitResponse = solrClientService.getSolrClient().commit(); if (object.isInstanceofDataset()) { - for (DataFile df : dataset.getFiles()) { + for (DataFile df : datasetService.find(object.getId()).getFiles()) { solrQuery.setQuery(SearchUtil.constructQuery(SearchFields.ENTITY_ID, df.getId().toString())); res = solrClientService.getSolrClient().query(solrQuery); if (!res.getResults().isEmpty()) { From b80e3e95dc5f708712b1b37e6e7e6cfd55c4a250 Mon Sep 17 00:00:00 2001 From: sirineREKIK <110403723+sirineREKIK@users.noreply.github.com> Date: Fri, 2 Jun 2023 14:36:59 +0200 Subject: [PATCH 0280/1525] Update doc/sphinx-guides/source/api/native-api.rst Co-authored-by: Philip Durbin --- doc/sphinx-guides/source/api/native-api.rst | 3 --- 1 file changed, 3 deletions(-) diff --git a/doc/sphinx-guides/source/api/native-api.rst b/doc/sphinx-guides/source/api/native-api.rst index f663c1fb306..2ec89c9bf14 100644 --- a/doc/sphinx-guides/source/api/native-api.rst +++ b/doc/sphinx-guides/source/api/native-api.rst @@ -4538,9 +4538,6 @@ MyData The MyData API is used to get a list of just the datasets, dataverses or datafiles an authenticated user can edit. -MyData API -~~~~~~~~~~ - A curl example listing objects .. 
code-block:: bash From b14e06ad3420f601b760e83b371c69553cb48d94 Mon Sep 17 00:00:00 2001 From: sirineREKIK Date: Fri, 2 Jun 2023 16:43:58 +0200 Subject: [PATCH 0281/1525] curl url improvement --- doc/sphinx-guides/source/api/native-api.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/sphinx-guides/source/api/native-api.rst b/doc/sphinx-guides/source/api/native-api.rst index 2ec89c9bf14..6b1bc72fea1 100644 --- a/doc/sphinx-guides/source/api/native-api.rst +++ b/doc/sphinx-guides/source/api/native-api.rst @@ -4549,7 +4549,7 @@ A curl example listing objects export PUBLISHED_STATES=Unpublished export PER_PAGE=10 - curl -H GET http://$SERVER_URL/api/mydata/retrieve?key=$API_TOKEN&role_ids=$ROLE_IDS&dvobject_types=$DVOBJECT_TYPES&published_states=$PUBLISHED_STATES&per_page=$PER_PAGE + curl "$SERVER_URL/api/mydata/retrieve?key=$API_TOKEN&role_ids=$ROLE_IDS&dvobject_types=$DVOBJECT_TYPES&published_states=$PUBLISHED_STATES&per_page=$PER_PAGE" Parameters: From 728c886444161cd23ad060ab8665961f4452ace6 Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Tue, 6 Jun 2023 01:16:08 +0200 Subject: [PATCH 0282/1525] ci: add installed gdcc packages to Maven run artifact --- .github/workflows/maven_unit_test.yml | 18 ++++++++++++------ 1 file changed, 12 insertions(+), 6 deletions(-) diff --git a/.github/workflows/maven_unit_test.yml b/.github/workflows/maven_unit_test.yml index 51e3c5188f6..cc918e16d97 100644 --- a/.github/workflows/maven_unit_test.yml +++ b/.github/workflows/maven_unit_test.yml @@ -60,11 +60,15 @@ jobs: - run: rm -rf ~/.m2/repository/edu/harvard/iq/dataverse # Store the build for the next step (integration test) to avoid recompilation and to transfer coverage reports - - run: tar -cvf java-builddir.tar target + - run: | + tar -cvf java-builddir.tar target + tar -cvf java-m2-selection.tar ~/.m2/repository/io/gdcc/dataverse-* - uses: actions/upload-artifact@v3 with: - name: java-builddir - path: java-builddir.tar + name: java-artifacts + path: | + java-builddir.tar + java-m2-selection.tar retention-days: 3 integration-test: @@ -100,11 +104,12 @@ jobs: # Get the build output from the unit test job - uses: actions/download-artifact@v3 with: - name: java-builddir - - run: tar -xvf java-builddir.tar + name: java-artifacts + - run: | + tar -xvf java-builddir.tar + tar -xvf java-m2-selection.tar -C / # Run integration tests (but not unit tests again) - # TODO - adopt to parent module - run: mvn -DskipUnitTests -Dtarget.java.version=${{ matrix.jdk }} verify # Wrap up and send to coverage job @@ -148,6 +153,7 @@ jobs: jacoco:report coveralls:report # NOTE: this may be extended with adding a report to the build output, leave a comment, send to Sonarcloud, ... + push-app-img: name: Publish App Image permissions: From 77242777e2afa68335884d9eed2f68e1be750102 Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Tue, 6 Jun 2023 08:39:03 +0200 Subject: [PATCH 0283/1525] ci: some more fixes for Maven Tests --- .github/workflows/maven_unit_test.yml | 13 +++++++------ 1 file changed, 7 insertions(+), 6 deletions(-) diff --git a/.github/workflows/maven_unit_test.yml b/.github/workflows/maven_unit_test.yml index cc918e16d97..5a5e55f82aa 100644 --- a/.github/workflows/maven_unit_test.yml +++ b/.github/workflows/maven_unit_test.yml @@ -47,12 +47,14 @@ jobs: cache: maven # The reason why we use "install" here is that we want the submodules to be available in the next step. - # Also, we can cache them this way for jobs triggered by this one. 
- - name: Build with Maven + # Also, we can cache them this way for jobs triggered by this one. We need to skip ITs here, as we run + # them in the next job - but install usually runs through verify phase. + - name: Build with Maven and run unit tests run: > mvn -B -f modules/dataverse-parent -Dtarget.java.version=${{ matrix.jdk }} -DcompilerArgument=-Xlint:unchecked -P all-unit-tests + -DskipIntegrationTests -pl edu.harvard.iq:dataverse -am install @@ -127,10 +129,9 @@ jobs: steps: # Basic setup chores - uses: actions/checkout@v3 - - name: Set up JDK ${{ matrix.jdk }} - uses: actions/setup-java@v3 + - uses: actions/setup-java@v3 with: - java-version: ${{ matrix.jdk }} + java-version: '11' distribution: temurin cache: maven @@ -141,7 +142,7 @@ jobs: - run: tar -xvf java-reportdir.tar # Deposit Code Coverage - - name: Maven Code Coverage + - name: Deposit Code Coverage env: CI_NAME: github COVERALLS_SECRET: ${{ secrets.GITHUB_TOKEN }} From 0287e037e6b76be83006c8447a7007d2386aea91 Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Tue, 6 Jun 2023 08:46:48 +0200 Subject: [PATCH 0284/1525] build: add config to enable skipping jacoco and failsafe tests --- pom.xml | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/pom.xml b/pom.xml index fd92ccdc06e..2aa74657422 100644 --- a/pom.xml +++ b/pom.xml @@ -747,6 +747,7 @@ ${project.build.directory}/coverage-reports/jacoco-integration.exec failsafe.jacoco.args + ${skipIntegrationTests} @@ -758,6 +759,7 @@ ${project.build.directory}/coverage-reports/jacoco-integration.exec ${project.reporting.outputDirectory}/jacoco-integration-test-coverage-report + ${skipIntegrationTests} @@ -776,6 +778,7 @@ ${project.build.directory}/coverage-reports/merged.exec + ${skipIntegrationTests} @@ -787,6 +790,7 @@ ${project.build.directory}/coverage-reports/merged.exec ${project.reporting.outputDirectory}/jacoco-merged-test-coverage-report + ${skipIntegrationTests} @@ -824,6 +828,7 @@ testcontainers ${failsafe.jacoco.args} ${argLine} + ${skipIntegrationTests} From 28ddc3797b60eafb2b4144bb06610dc1caadf54f Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Tue, 6 Jun 2023 11:08:25 +0200 Subject: [PATCH 0285/1525] fix(auth,oidc): do not add null verifier to cache Will throw an NPE otherwise. --- .../authorization/providers/oauth2/oidc/OIDCAuthProvider.java | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/oidc/OIDCAuthProvider.java b/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/oidc/OIDCAuthProvider.java index 818332ea282..5eb2b391eb7 100644 --- a/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/oidc/OIDCAuthProvider.java +++ b/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/oidc/OIDCAuthProvider.java @@ -191,7 +191,9 @@ public String buildAuthzUrl(String state, String callbackUrl) { // Cache the PKCE verifier, as we need the secret in it for verification later again, after the client sends us // the auth code! We use the state to cache the verifier, as the state is unique per authentication event. 
- this.verifierCache.put(state, pkceVerifier); + if (pkceVerifier != null) { + this.verifierCache.put(state, pkceVerifier); + } return req.toURI().toString(); } From 749c13be13c6c6e3c0f8bea26674e488cd62d6ab Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Tue, 6 Jun 2023 11:10:09 +0200 Subject: [PATCH 0286/1525] feat(ct,oidc,auth): add dev keycloak connection to compose file --- docker-compose-dev.yml | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/docker-compose-dev.yml b/docker-compose-dev.yml index e3f93b77d4a..d582a6375f9 100644 --- a/docker-compose-dev.yml +++ b/docker-compose-dev.yml @@ -13,6 +13,10 @@ services: - DATAVERSE_DB_PASSWORD=secret - DATAVERSE_DB_USER=${DATAVERSE_DB_USER} - DATAVERSE_FEATURE_API_BEARER_AUTH=1 + - DATAVERSE_AUTH_OIDC_ENABLED=1 + - DATAVERSE_AUTH_OIDC_CLIENT_ID=test + - DATAVERSE_AUTH_OIDC_CLIENT_SECRET=94XHrfNRwXsjqTqApRrwWmhDLDHpIYV8 + - DATAVERSE_AUTH_OIDC_AUTH_SERVER_URL=http://keycloak.mydomain.com:8090/realms/test ports: - "8080:8080" # HTTP (Dataverse Application) - "4848:4848" # HTTP (Payara Admin Console) From 849df5d2214a630fe70dc177c7188f09b4ae782b Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Tue, 6 Jun 2023 13:04:30 +0200 Subject: [PATCH 0287/1525] docs,fix(oidc): fix API auth docs example with new test realm --- doc/sphinx-guides/source/api/auth.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/sphinx-guides/source/api/auth.rst b/doc/sphinx-guides/source/api/auth.rst index bbc81b595e3..eced7afbbcf 100644 --- a/doc/sphinx-guides/source/api/auth.rst +++ b/doc/sphinx-guides/source/api/auth.rst @@ -77,6 +77,6 @@ To test if bearer tokens are working, you can try something like the following ( .. code-block:: bash - export TOKEN=`curl -s -X POST --location "http://keycloak.mydomain.com:8090/realms/oidc-realm/protocol/openid-connect/token" -H "Content-Type: application/x-www-form-urlencoded" -d "username=kcuser&password=kcpassword&grant_type=password&client_id=oidc-client&client_secret=ss6gE8mODCDfqesQaSG3gwUwZqZt547E" | jq '.access_token' -r | tr -d "\n"` + export TOKEN=`curl -s -X POST --location "http://keycloak.mydomain.com:8090/realms/test/protocol/openid-connect/token" -H "Content-Type: application/x-www-form-urlencoded" -d "username=user&password=user&grant_type=password&client_id=test&client_secret=94XHrfNRwXsjqTqApRrwWmhDLDHpIYV8" | jq '.access_token' -r | tr -d "\n"` curl -H "Authorization: Bearer $TOKEN" http://localhost:8080/api/users/:me From 4bad9961847e4e636711e57136016c22d29aad1e Mon Sep 17 00:00:00 2001 From: sirineREKIK Date: Wed, 7 Jun 2023 16:14:45 +0200 Subject: [PATCH 0288/1525] add release note --- doc/release-notes/5042-add-mydata-doc-api.md | 2 ++ 1 file changed, 2 insertions(+) create mode 100644 doc/release-notes/5042-add-mydata-doc-api.md diff --git a/doc/release-notes/5042-add-mydata-doc-api.md b/doc/release-notes/5042-add-mydata-doc-api.md new file mode 100644 index 00000000000..5a77e266725 --- /dev/null +++ b/doc/release-notes/5042-add-mydata-doc-api.md @@ -0,0 +1,2 @@ +An API named 'MyData' is supported by Dataverse. A documentation has been added describing its use (PR #9596) +This API is used to get a list of only the objects (datasets, dataverses or datafiles) that an authenticated user can modify. 
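Aside: the query string in the documented MyData curl example maps directly onto any HTTP client; note that this API takes the token as a ?key= query parameter rather than an X-Dataverse-key header. A minimal Java sketch of the same request — the host and the parameter values are illustrative, taken from the example above:

import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;

public class MyDataExample {
    public static void main(String[] args) throws Exception {
        String serverUrl = "http://localhost:8080";   // placeholder host
        String apiToken = System.getenv("API_TOKEN"); // token goes in the query string for MyData
        String query = "key=" + apiToken
                + "&role_ids=1"                       // e.g. 1 = Admin
                + "&dvobject_types=Dataset"
                + "&published_states=Unpublished"
                + "&per_page=10";
        HttpRequest req = HttpRequest.newBuilder()
                .uri(URI.create(serverUrl + "/api/mydata/retrieve?" + query))
                .GET()
                .build();
        HttpResponse<String> resp = HttpClient.newHttpClient()
                .send(req, HttpResponse.BodyHandlers.ofString());
        System.out.println(resp.statusCode() + ": " + resp.body());
    }
}
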
From 4be0b5d10f48627484f2466bdf40b6cce4fbb74d Mon Sep 17 00:00:00 2001 From: Eryk Kulikowski Date: Fri, 9 Jun 2023 18:11:25 +0200 Subject: [PATCH 0289/1525] restored findDeep in index service bean but only where find was used --- .../harvard/iq/dataverse/search/IndexServiceBean.java | 10 +++++++--- 1 file changed, 7 insertions(+), 3 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/search/IndexServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/search/IndexServiceBean.java index baa13eba368..0b8f93e47a9 100644 --- a/src/main/java/edu/harvard/iq/dataverse/search/IndexServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/search/IndexServiceBean.java @@ -352,7 +352,7 @@ public Future indexDataverse(Dataverse dataverse, boolean processPaths) @TransactionAttribute(REQUIRES_NEW) public void indexDatasetInNewTransaction(Long datasetId) { //Dataset dataset) { boolean doNormalSolrDocCleanUp = false; - Dataset dataset = em.find(Dataset.class, datasetId); + Dataset dataset = datasetService.findDeep(datasetId); asyncIndexDataset(dataset, doNormalSolrDocCleanUp); dataset = null; } @@ -1694,7 +1694,11 @@ private void updatePathForExistingSolrDocs(DvObject object) throws SolrServerExc sid.addField(fieldName, doc.getFieldValue(fieldName)); } - List paths = object.isInstanceofDataset() ? retrieveDVOPaths(datasetService.find(object.getId())) + Dataset dataset = null; + if (object.isInstanceofDataset()) { + dataset = datasetService.findDeep(object.getId()); + } + List paths = object.isInstanceofDataset() ? retrieveDVOPaths(dataset) : retrieveDVOPaths(dataverseService.find(object.getId())); sid.removeField(SearchFields.SUBTREE); @@ -1702,7 +1706,7 @@ private void updatePathForExistingSolrDocs(DvObject object) throws SolrServerExc UpdateResponse addResponse = solrClientService.getSolrClient().add(sid); UpdateResponse commitResponse = solrClientService.getSolrClient().commit(); if (object.isInstanceofDataset()) { - for (DataFile df : datasetService.find(object.getId()).getFiles()) { + for (DataFile df : dataset.getFiles()) { solrQuery.setQuery(SearchUtil.constructQuery(SearchFields.ENTITY_ID, df.getId().toString())); res = solrClientService.getSolrClient().query(solrQuery); if (!res.getResults().isEmpty()) { From f5653e332e70289c2edf0a97844dcf56feb3079d Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Mon, 12 Jun 2023 16:54:11 -0400 Subject: [PATCH 0290/1525] clean up tests #9588 --- .../harvard/iq/dataverse/api/DatasetsIT.java | 72 +++++++++++++------ 1 file changed, 52 insertions(+), 20 deletions(-) diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java index 44a08e48369..687ab453d24 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java @@ -81,8 +81,10 @@ import static org.hamcrest.CoreMatchers.containsString; import static org.hamcrest.CoreMatchers.equalTo; +import static org.hamcrest.CoreMatchers.hasItems; import static org.hamcrest.CoreMatchers.startsWith; import static org.hamcrest.CoreMatchers.nullValue; +import static org.hamcrest.Matchers.contains; import org.junit.AfterClass; import org.junit.Assert; @@ -113,6 +115,11 @@ public static void setUpClass() { Response removeExcludeEmail = UtilIT.deleteSetting(SettingsServiceBean.Key.ExcludeEmailFromExport); removeExcludeEmail.then().assertThat() .statusCode(200); + + Response removeAnonymizedFieldTypeNames = 
UtilIT.deleteSetting(SettingsServiceBean.Key.AnonymizedFieldTypeNames); + removeAnonymizedFieldTypeNames.then().assertThat() + .statusCode(200); + /* With Dual mode, we can no longer mess with upload methods since native is now required for anything to work Response removeDcmUrl = UtilIT.deleteSetting(SettingsServiceBean.Key.DataCaptureModuleUrl); @@ -135,6 +142,11 @@ public static void afterClass() { Response removeExcludeEmail = UtilIT.deleteSetting(SettingsServiceBean.Key.ExcludeEmailFromExport); removeExcludeEmail.then().assertThat() .statusCode(200); + + Response removeAnonymizedFieldTypeNames = UtilIT.deleteSetting(SettingsServiceBean.Key.AnonymizedFieldTypeNames); + removeAnonymizedFieldTypeNames.then().assertThat() + .statusCode(200); + /* See above Response removeDcmUrl = UtilIT.deleteSetting(SettingsServiceBean.Key.DataCaptureModuleUrl); removeDcmUrl.then().assertThat() @@ -3069,58 +3081,70 @@ public void testArchivalStatusAPI() throws IOException { @Test public void testGetDatasetSummaryFieldNames() { Response summaryFieldNamesResponse = UtilIT.getDatasetSummaryFieldNames(); - summaryFieldNamesResponse.then().assertThat().statusCode(OK.getStatusCode()); - JsonArray actualSummaryFields; - try (StringReader rdr = new StringReader(summaryFieldNamesResponse.body().asString())) { - actualSummaryFields = Json.createReader(rdr).readObject().getJsonArray("data"); - } - assertFalse(actualSummaryFields.isEmpty()); + summaryFieldNamesResponse.then().assertThat() + .statusCode(OK.getStatusCode()) + // check for any order + .body("data", hasItems("dsDescription", "subject", "keyword", "publication", "notesText")) + // check for exact order + .body("data", contains("dsDescription", "subject", "keyword", "publication", "notesText")); } @Test public void getPrivateUrlDatasetVersion() { Response createUser = UtilIT.createRandomUser(); + createUser.then().assertThat().statusCode(OK.getStatusCode()); String apiToken = UtilIT.getApiTokenFromResponse(createUser); Response createDataverseResponse = UtilIT.createRandomDataverse(apiToken); + createDataverseResponse.then().assertThat().statusCode(CREATED.getStatusCode()); String dataverseAlias = UtilIT.getAliasFromResponse(createDataverseResponse); // Non-anonymized test - Response createDatasetResponse = UtilIT.createRandomDatasetViaNativeApi(dataverseAlias, apiToken); + createDatasetResponse.then().assertThat().statusCode(CREATED.getStatusCode()); Integer datasetId = JsonPath.from(createDatasetResponse.body().asString()).getInt("data.id"); - UtilIT.privateUrlCreate(datasetId, apiToken, false); + UtilIT.privateUrlCreate(datasetId, apiToken, false).then().assertThat().statusCode(OK.getStatusCode()); Response privateUrlGet = UtilIT.privateUrlGet(datasetId, apiToken); + privateUrlGet.then().assertThat().statusCode(OK.getStatusCode()); String tokenForPrivateUrlUser = JsonPath.from(privateUrlGet.body().asString()).getString("data.token"); // We verify that the response contains the dataset associated to the private URL token Response getPrivateUrlDatasetVersionResponse = UtilIT.getPrivateUrlDatasetVersion(tokenForPrivateUrlUser); getPrivateUrlDatasetVersionResponse.then().assertThat() - .body("data.datasetId", equalTo(datasetId)) - .statusCode(OK.getStatusCode()); + .statusCode(OK.getStatusCode()) + .body("data.datasetId", equalTo(datasetId)); // Test anonymized - Response setAnonymizedFieldsSettingResponse = UtilIT.setSetting(SettingsServiceBean.Key.AnonymizedFieldTypeNames, "author"); 
setAnonymizedFieldsSettingResponse.then().assertThat().statusCode(OK.getStatusCode()); createDatasetResponse = UtilIT.createRandomDatasetViaNativeApi(dataverseAlias, apiToken); + createDatasetResponse.then().assertThat().statusCode(CREATED.getStatusCode()); datasetId = JsonPath.from(createDatasetResponse.body().asString()).getInt("data.id"); - UtilIT.privateUrlCreate(datasetId, apiToken, true); + UtilIT.privateUrlCreate(datasetId, apiToken, true).then().assertThat().statusCode(OK.getStatusCode()); privateUrlGet = UtilIT.privateUrlGet(datasetId, apiToken); + privateUrlGet.then().assertThat().statusCode(OK.getStatusCode()); tokenForPrivateUrlUser = JsonPath.from(privateUrlGet.body().asString()).getString("data.token"); Response getPrivateUrlDatasetVersionAnonymizedResponse = UtilIT.getPrivateUrlDatasetVersion(tokenForPrivateUrlUser); + getPrivateUrlDatasetVersionAnonymizedResponse.prettyPrint(); // We verify that the response is anonymized for the author field getPrivateUrlDatasetVersionAnonymizedResponse.then().assertThat() + .statusCode(OK.getStatusCode()) .body("data.datasetId", equalTo(datasetId)) .body("data.metadataBlocks.citation.fields[1].value", equalTo(BundleUtil.getStringFromBundle("dataset.anonymized.withheld"))) .body("data.metadataBlocks.citation.fields[1].typeClass", equalTo("primitive")) - .body("data.metadataBlocks.citation.fields[1].multiple", equalTo(false)) - .statusCode(OK.getStatusCode()); + .body("data.metadataBlocks.citation.fields[1].multiple", equalTo(false)); + + // Similar to the check above but doesn't rely on fields[1] + List authors = with(getPrivateUrlDatasetVersionAnonymizedResponse.body().asString()).param("fieldToFind", "author") + .getJsonObject("data.metadataBlocks.citation.fields.findAll { fields -> fields.typeName == fieldToFind }"); + Map firstAuthor = authors.get(0); + String value = (String) firstAuthor.get("value"); + assertEquals(BundleUtil.getStringFromBundle("dataset.anonymized.withheld"), value); UtilIT.deleteSetting(SettingsServiceBean.Key.AnonymizedFieldTypeNames); @@ -3132,42 +3156,50 @@ public void getPrivateUrlDatasetVersion() { @Test public void getPrivateUrlDatasetVersionCitation() { Response createUser = UtilIT.createRandomUser(); + createUser.then().assertThat().statusCode(OK.getStatusCode()); String apiToken = UtilIT.getApiTokenFromResponse(createUser); Response createDataverseResponse = UtilIT.createRandomDataverse(apiToken); + createDataverseResponse.then().assertThat().statusCode(CREATED.getStatusCode()); String dataverseAlias = UtilIT.getAliasFromResponse(createDataverseResponse); Response createDatasetResponse = UtilIT.createRandomDatasetViaNativeApi(dataverseAlias, apiToken); + createDatasetResponse.then().assertThat().statusCode(CREATED.getStatusCode()); int datasetId = JsonPath.from(createDatasetResponse.body().asString()).getInt("data.id"); - UtilIT.privateUrlCreate(datasetId, apiToken, false); + UtilIT.privateUrlCreate(datasetId, apiToken, false).then().assertThat().statusCode(OK.getStatusCode()); Response privateUrlGet = UtilIT.privateUrlGet(datasetId, apiToken); String tokenForPrivateUrlUser = JsonPath.from(privateUrlGet.body().asString()).getString("data.token"); Response getPrivateUrlDatasetVersionCitation = UtilIT.getPrivateUrlDatasetVersionCitation(tokenForPrivateUrlUser); + getPrivateUrlDatasetVersionCitation.prettyPrint(); + getPrivateUrlDatasetVersionCitation.then().assertThat() + .statusCode(OK.getStatusCode()) // We check that the returned message contains information expected for the citation string - 
.body("data.message", containsString("DRAFT VERSION")) - .statusCode(OK.getStatusCode()); + .body("data.message", containsString("DRAFT VERSION")); } @Test public void getDatasetVersionCitation() { Response createUser = UtilIT.createRandomUser(); + createUser.then().assertThat().statusCode(OK.getStatusCode()); String apiToken = UtilIT.getApiTokenFromResponse(createUser); Response createDataverseResponse = UtilIT.createRandomDataverse(apiToken); + createDataverseResponse.then().assertThat().statusCode(CREATED.getStatusCode()); String dataverseAlias = UtilIT.getAliasFromResponse(createDataverseResponse); Response createDatasetResponse = UtilIT.createRandomDatasetViaNativeApi(dataverseAlias, apiToken); - Integer datasetId = JsonPath.from(createDatasetResponse.body().asString()).getInt("data.id"); + createDatasetResponse.then().assertThat().statusCode(CREATED.getStatusCode()); + int datasetId = JsonPath.from(createDatasetResponse.body().asString()).getInt("data.id"); Response getDatasetVersionCitationResponse = UtilIT.getDatasetVersionCitation(datasetId, ":draft", apiToken); getDatasetVersionCitationResponse.prettyPrint(); getDatasetVersionCitationResponse.then().assertThat() + .statusCode(OK.getStatusCode()) // We check that the returned message contains information expected for the citation string - .body("data.message", containsString("DRAFT VERSION")) - .statusCode(OK.getStatusCode()); + .body("data.message", containsString("DRAFT VERSION")); } } From 1b9403fc82e162e73d4f574973fae720d16b283e Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Mon, 12 Jun 2023 23:11:46 +0200 Subject: [PATCH 0291/1525] test(e2e): migrate MetadataBlocksIT to JUnit5 --- .../edu/harvard/iq/dataverse/api/MetadataBlocksIT.java | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/src/test/java/edu/harvard/iq/dataverse/api/MetadataBlocksIT.java b/src/test/java/edu/harvard/iq/dataverse/api/MetadataBlocksIT.java index 05b7a7910ff..c4dba09453f 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/MetadataBlocksIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/MetadataBlocksIT.java @@ -4,18 +4,18 @@ import com.jayway.restassured.response.Response; import static javax.ws.rs.core.Response.Status.OK; import org.hamcrest.CoreMatchers; -import org.junit.BeforeClass; -import org.junit.Test; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.Test; public class MetadataBlocksIT { - @BeforeClass + @BeforeAll public static void setUpClass() { RestAssured.baseURI = UtilIT.getRestAssuredBaseUri(); } @Test - public void testGetCitationBlock() { + void testGetCitationBlock() { Response getCitationBlock = UtilIT.getMetadataBlock("citation"); getCitationBlock.prettyPrint(); getCitationBlock.then().assertThat() From 4ed04c55b0d0a1a40e6133dad4fae64bb026892f Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Tue, 13 Jun 2023 00:00:48 +0200 Subject: [PATCH 0292/1525] test(e2e): add E2E test creating dataset with all default metadata blocks values in use --- .../iq/dataverse/api/MetadataBlocksIT.java | 36 ++++++++++++++++++- 1 file changed, 35 insertions(+), 1 deletion(-) diff --git a/src/test/java/edu/harvard/iq/dataverse/api/MetadataBlocksIT.java b/src/test/java/edu/harvard/iq/dataverse/api/MetadataBlocksIT.java index c4dba09453f..c958c339daf 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/MetadataBlocksIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/MetadataBlocksIT.java @@ -2,11 +2,16 @@ import com.jayway.restassured.RestAssured; import 
com.jayway.restassured.response.Response; -import static javax.ws.rs.core.Response.Status.OK; import org.hamcrest.CoreMatchers; import org.junit.jupiter.api.BeforeAll; import org.junit.jupiter.api.Test; +import static javax.ws.rs.core.Response.Status.CREATED; +import static javax.ws.rs.core.Response.Status.OK; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assumptions.assumeFalse; +import static org.junit.jupiter.api.Assumptions.assumeTrue; + public class MetadataBlocksIT { @BeforeAll @@ -22,5 +27,34 @@ void testGetCitationBlock() { .statusCode(OK.getStatusCode()) .body("data.fields.subject.controlledVocabularyValues[0]", CoreMatchers.is("Agricultural Sciences")); } + + @Test + void testDatasetWithAllDefaultMetadata() { + // given + Response createUser = UtilIT.createRandomUser(); + assumeTrue(createUser.statusCode() < 300, + "code=" + createUser.statusCode() + + ", response=" + createUser.prettyPrint()); + String apiToken = UtilIT.getApiTokenFromResponse(createUser); + assumeFalse(apiToken == null || apiToken.isBlank()); + + Response createCollection = UtilIT.createRandomDataverse(apiToken); + assumeTrue(createCollection.statusCode() < 300, + "code=" + createCollection.statusCode() + + ", response=" + createCollection.prettyPrint()); + String dataverseAlias = UtilIT.getAliasFromResponse(createCollection); + assumeFalse(dataverseAlias == null || dataverseAlias.isBlank()); + + // when + String pathToJsonFile = "scripts/api/data/dataset-create-new-all-default-fields.json"; + Response createDataset = UtilIT.createDatasetViaNativeApi(dataverseAlias, pathToJsonFile, apiToken); + + // then + assertEquals(CREATED.getStatusCode(), createDataset.statusCode(), + "code=" + createDataset.statusCode() + + ", response=" + createDataset.prettyPrint()); + createDataset.then().assertThat() + .body("status", CoreMatchers.equalTo("OK")); + } } From 10a82e4e69373d52f7cc3c77e7f3d985ed48efbc Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Tue, 13 Jun 2023 00:13:00 +0200 Subject: [PATCH 0293/1525] fix(e2e): fix test data for "series" now multiple #9633 --- scripts/api/data/dataset-create-new-all-default-fields.json | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/scripts/api/data/dataset-create-new-all-default-fields.json b/scripts/api/data/dataset-create-new-all-default-fields.json index 58e2b26e8e8..4d9303a4209 100644 --- a/scripts/api/data/dataset-create-new-all-default-fields.json +++ b/scripts/api/data/dataset-create-new-all-default-fields.json @@ -710,9 +710,9 @@ }, { "typeName": "series", - "multiple": false, + "multiple": true, "typeClass": "compound", - "value": { + "value": [{ "seriesName": { "typeName": "seriesName", "multiple": false, @@ -725,7 +725,7 @@ "typeClass": "primitive", "value": "SeriesInformation" } - } + }] }, { "typeName": "software", From 8daf7266435ecd7dced165c27888f4d69b54708a Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Tue, 13 Jun 2023 00:13:56 +0200 Subject: [PATCH 0294/1525] fix(e2e): fix test data for dataset with all default metadata There was a non-existing controlled vocabulary term used. 
--- scripts/api/data/dataset-create-new-all-default-fields.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/api/data/dataset-create-new-all-default-fields.json b/scripts/api/data/dataset-create-new-all-default-fields.json index 4d9303a4209..4af128955c9 100644 --- a/scripts/api/data/dataset-create-new-all-default-fields.json +++ b/scripts/api/data/dataset-create-new-all-default-fields.json @@ -1404,7 +1404,7 @@ "multiple": true, "typeClass": "controlledVocabulary", "value": [ - "cell counting", + "genome sequencing", "cell sorting", "clinical chemistry analysis", "DNA methylation profiling" From 502e660fe342939a617edd6d17a425c83b5a269b Mon Sep 17 00:00:00 2001 From: Jim Myers Date: Fri, 12 May 2023 13:22:46 -0400 Subject: [PATCH 0295/1525] suppress thumb generation after a failure --- .../edu/harvard/iq/dataverse/DvObject.java | 14 +++++ .../dataaccess/ImageThumbConverter.java | 55 ++++++++++++------- .../dataverse/ingest/IngestServiceBean.java | 4 +- .../V5.13.0.1__9506-track-thumb-failures.sql | 1 + 4 files changed, 54 insertions(+), 20 deletions(-) create mode 100644 src/main/resources/db/migration/V5.13.0.1__9506-track-thumb-failures.sql diff --git a/src/main/java/edu/harvard/iq/dataverse/DvObject.java b/src/main/java/edu/harvard/iq/dataverse/DvObject.java index 854888737ee..6cb3816e3f1 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DvObject.java +++ b/src/main/java/edu/harvard/iq/dataverse/DvObject.java @@ -181,7 +181,20 @@ public boolean isPreviewImageAvailable() { public void setPreviewImageAvailable(boolean status) { this.previewImageAvailable = status; } + + /** Indicates whether a previous attempt to generate a preview image has failed, regardless of size. + * If so, we won't want to try again every time the preview/thumbnail is requested for a view. 
+ */ + private boolean previewsHaveFailed; + + public boolean isPreviewsHaveFailed() { + return previewsHaveFailed; + } + public void setPreviewsHaveFailed(boolean previewsHaveFailed) { + this.previewsHaveFailed = previewsHaveFailed; + } + public Timestamp getModificationTime() { return modificationTime; } @@ -462,6 +475,7 @@ public void setStorageIdentifier(String storageIdentifier) { */ public abstract boolean isAncestorOf( DvObject other ); + @OneToMany(mappedBy = "definitionPoint",cascade={ CascadeType.REMOVE, CascadeType.MERGE,CascadeType.PERSIST}, orphanRemoval=true) List roleAssignments; } diff --git a/src/main/java/edu/harvard/iq/dataverse/dataaccess/ImageThumbConverter.java b/src/main/java/edu/harvard/iq/dataverse/dataaccess/ImageThumbConverter.java index 2b4aed3a9a5..eb08646454d 100644 --- a/src/main/java/edu/harvard/iq/dataverse/dataaccess/ImageThumbConverter.java +++ b/src/main/java/edu/harvard/iq/dataverse/dataaccess/ImageThumbConverter.java @@ -48,6 +48,7 @@ import java.nio.channels.WritableByteChannel; import java.nio.file.Path; import java.nio.file.Paths; +import java.util.logging.Level; import java.util.logging.Logger; import org.apache.commons.io.IOUtils; //import org.primefaces.util.Base64; @@ -110,15 +111,24 @@ private static boolean isThumbnailAvailable(StorageIO storageIO, int s } if (isThumbnailCached(storageIO, size)) { + logger.fine("Found cached thumbnail for " + file.getId()); return true; } - logger.fine("Checking for thumbnail, file type: " + file.getContentType()); - - if (file.getContentType().substring(0, 6).equalsIgnoreCase("image/")) { - return generateImageThumbnail(storageIO, size); - } else if (file.getContentType().equalsIgnoreCase("application/pdf")) { - return generatePDFThumbnail(storageIO, size); + logger.log(Level.FINE, (file.isPreviewsHaveFailed() ? "Not trying" : "Trying") + "to generate thumbnail, file id: " + file.getId()); + // Don't try to generate if there have been failures: + if (!file.isPreviewsHaveFailed()) { + boolean thumbnailGenerated = false; + if (file.getContentType().substring(0, 6).equalsIgnoreCase("image/")) { + thumbnailGenerated = generateImageThumbnail(storageIO, size); + } else if (file.getContentType().equalsIgnoreCase("application/pdf")) { + thumbnailGenerated = generatePDFThumbnail(storageIO, size); + } + if (!thumbnailGenerated) { + logger.fine("No thumbnail generated for " + file.getId()); + file.setPreviewGenerationHasPreviouslyFailed(true); + } + return thumbnailGenerated; } return false; @@ -436,20 +446,27 @@ public static String getImageThumbnailAsBase64(DataFile file, int size) { if (cachedThumbnailChannel == null) { logger.fine("Null channel for aux object " + THUMBNAIL_SUFFIX + size); - // try to generate, if not available: - boolean generated = false; - if (file.getContentType().substring(0, 6).equalsIgnoreCase("image/")) { - generated = generateImageThumbnail(storageIO, size); - } else if (file.getContentType().equalsIgnoreCase("application/pdf")) { - generated = generatePDFThumbnail(storageIO, size); - } + // try to generate, if not available and hasn't failed before + logger.log(Level.FINE, (file.isPreviewsHaveFailed() ? 
"Not trying" : "Trying") + "to generate base64 thumbnail, file id: " + file.getId()); + if (!file.isPreviewsHaveFailed()) { + boolean generated = false; + if (file.getContentType().substring(0, 6).equalsIgnoreCase("image/")) { + generated = generateImageThumbnail(storageIO, size); + } else if (file.getContentType().equalsIgnoreCase("application/pdf")) { + generated = generatePDFThumbnail(storageIO, size); + } - if (generated) { - // try to open again: - try { - cachedThumbnailChannel = storageIO.openAuxChannel(THUMBNAIL_SUFFIX + size); - } catch (Exception ioEx) { - cachedThumbnailChannel = null; + if (!generated) { + // Record failure + logger.fine("Failed to generate base64 thumbnail for file id: " + file.getId()); + file.setPreviewGenerationHasPreviouslyFailed(true); + } else { + // Success - try to open again: + try { + cachedThumbnailChannel = storageIO.openAuxChannel(THUMBNAIL_SUFFIX + size); + } catch (Exception ioEx) { + cachedThumbnailChannel = null; + } } } diff --git a/src/main/java/edu/harvard/iq/dataverse/ingest/IngestServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/ingest/IngestServiceBean.java index 5a353453fe8..fbe2d7b38ff 100644 --- a/src/main/java/edu/harvard/iq/dataverse/ingest/IngestServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/ingest/IngestServiceBean.java @@ -292,7 +292,9 @@ public List saveAndAddFilesToDataset(DatasetVersion version, } catch (IOException ioex) { logger.warning("Failed to save generated file " + generated.toString()); - } + //Shouldn't mark this file as having a preview after this. + dataFile.setPreviewImageAvailable(false); + } } // ... but we definitely want to delete it: diff --git a/src/main/resources/db/migration/V5.13.0.1__9506-track-thumb-failures.sql b/src/main/resources/db/migration/V5.13.0.1__9506-track-thumb-failures.sql new file mode 100644 index 00000000000..9b12d27db91 --- /dev/null +++ b/src/main/resources/db/migration/V5.13.0.1__9506-track-thumb-failures.sql @@ -0,0 +1 @@ +ALTER TABLE dvobject ADD COLUMN IF NOT EXISTS previewshavefailed BOOLEAN DEFAULT FALSE; \ No newline at end of file From 0fea5ccca11b2348429ddfee75e4bafc709c7473 Mon Sep 17 00:00:00 2001 From: Jim Myers Date: Fri, 12 May 2023 13:25:38 -0400 Subject: [PATCH 0296/1525] refactor error --- .../harvard/iq/dataverse/dataaccess/ImageThumbConverter.java | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/dataaccess/ImageThumbConverter.java b/src/main/java/edu/harvard/iq/dataverse/dataaccess/ImageThumbConverter.java index eb08646454d..254c334d655 100644 --- a/src/main/java/edu/harvard/iq/dataverse/dataaccess/ImageThumbConverter.java +++ b/src/main/java/edu/harvard/iq/dataverse/dataaccess/ImageThumbConverter.java @@ -126,7 +126,7 @@ private static boolean isThumbnailAvailable(StorageIO storageIO, int s } if (!thumbnailGenerated) { logger.fine("No thumbnail generated for " + file.getId()); - file.setPreviewGenerationHasPreviouslyFailed(true); + file.setPreviewsHaveFailed(true); } return thumbnailGenerated; } @@ -459,7 +459,7 @@ public static String getImageThumbnailAsBase64(DataFile file, int size) { if (!generated) { // Record failure logger.fine("Failed to generate base64 thumbnail for file id: " + file.getId()); - file.setPreviewGenerationHasPreviouslyFailed(true); + file.setPreviewsHaveFailed(true); } else { // Success - try to open again: try { From 8f5350ae0df4df60c55ff770259531935cb6ac9b Mon Sep 17 00:00:00 2001 From: Jim Myers Date: Mon, 15 May 2023 10:32:21 -0400 Subject: [PATCH 
0297/1525] cache isThumb available --- .../iq/dataverse/ThumbnailServiceWrapper.java | 13 +++++++++++-- 1 file changed, 11 insertions(+), 2 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/ThumbnailServiceWrapper.java b/src/main/java/edu/harvard/iq/dataverse/ThumbnailServiceWrapper.java index 6c8db8c124b..e2bb21c8a4c 100644 --- a/src/main/java/edu/harvard/iq/dataverse/ThumbnailServiceWrapper.java +++ b/src/main/java/edu/harvard/iq/dataverse/ThumbnailServiceWrapper.java @@ -49,6 +49,7 @@ public class ThumbnailServiceWrapper implements java.io.Serializable { private Map dvobjectThumbnailsMap = new HashMap<>(); private Map dvobjectViewMap = new HashMap<>(); + private Map hasThumbMap = new HashMap<>(); private String getAssignedDatasetImage(Dataset dataset, int size) { if (dataset == null) { @@ -133,7 +134,7 @@ public String getFileCardImageAsBase64Url(SolrSearchResult result) { if ((!((DataFile)result.getEntity()).isRestricted() || permissionsWrapper.hasDownloadFilePermission(result.getEntity())) - && dataFileService.isThumbnailAvailable((DataFile) result.getEntity())) { + && isThumbnailAvailable((DataFile) result.getEntity())) { cardImageUrl = ImageThumbConverter.getImageThumbnailAsBase64( (DataFile) result.getEntity(), @@ -159,6 +160,13 @@ public String getFileCardImageAsBase64Url(SolrSearchResult result) { return null; } + public boolean isThumbnailAvailable(DataFile entity) { + if(!hasThumbMap.containsKey(entity.getId())) { + hasThumbMap.put(entity.getId(), dataFileService.isThumbnailAvailable(entity)); + } + return hasThumbMap.get(entity.getId()); + } + // it's the responsibility of the user - to make sure the search result // passed to this method is of the Dataset type! public String getDatasetCardImageAsBase64Url(SolrSearchResult result) { @@ -295,7 +303,7 @@ public String getDatasetCardImageAsBase64Url(Dataset dataset, Long versionId, bo } } - if (dataFileService.isThumbnailAvailable(thumbnailImageFile)) { + if (isThumbnailAvailable(thumbnailImageFile)) { cardImageUrl = ImageThumbConverter.getImageThumbnailAsBase64( thumbnailImageFile, size); @@ -323,6 +331,7 @@ public String getDataverseCardImageAsBase64Url(SolrSearchResult result) { public void resetObjectMaps() { dvobjectThumbnailsMap = new HashMap<>(); dvobjectViewMap = new HashMap<>(); + hasThumbMap = new HashMap<>(); } From 8604eef7f470eade8dbf885ed42bc47407db74ff Mon Sep 17 00:00:00 2001 From: Jim Myers Date: Mon, 15 May 2023 13:22:18 -0400 Subject: [PATCH 0298/1525] set thumb fail column --- .../java/edu/harvard/iq/dataverse/DataFileServiceBean.java | 5 ++++- .../harvard/iq/dataverse/dataaccess/ImageThumbConverter.java | 2 +- 2 files changed, 5 insertions(+), 2 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/DataFileServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DataFileServiceBean.java index 196f84b6877..a5822828682 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DataFileServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/DataFileServiceBean.java @@ -1127,7 +1127,7 @@ public boolean isThumbnailAvailable (DataFile file) { } // If thumbnails are not even supported for this class of files, - // there's notthing to talk about: + // there's nothing to talk about: if (!FileUtil.isThumbnailSupported(file)) { return false; } @@ -1149,6 +1149,9 @@ public boolean isThumbnailAvailable (DataFile file) { file.setPreviewImageAvailable(true); this.save(file); return true; + } else { + file.setPreviewsHaveFailed(true); + this.save(file); } return false; diff --git 
a/src/main/java/edu/harvard/iq/dataverse/dataaccess/ImageThumbConverter.java b/src/main/java/edu/harvard/iq/dataverse/dataaccess/ImageThumbConverter.java index 254c334d655..ab9294eea72 100644 --- a/src/main/java/edu/harvard/iq/dataverse/dataaccess/ImageThumbConverter.java +++ b/src/main/java/edu/harvard/iq/dataverse/dataaccess/ImageThumbConverter.java @@ -115,7 +115,7 @@ private static boolean isThumbnailAvailable(StorageIO storageIO, int s return true; } - logger.log(Level.FINE, (file.isPreviewsHaveFailed() ? "Not trying" : "Trying") + "to generate thumbnail, file id: " + file.getId()); + logger.log(Level.FINE, (file.isPreviewsHaveFailed() ? "Not trying" : "Trying") + " to generate thumbnail, file id: " + file.getId()); // Don't try to generate if there have been failures: if (!file.isPreviewsHaveFailed()) { boolean thumbnailGenerated = false; From aeae8f4ddbb05794c177e9b1d33725e1ed7d7e2f Mon Sep 17 00:00:00 2001 From: Jim Myers Date: Mon, 15 May 2023 13:50:49 -0400 Subject: [PATCH 0299/1525] use thumb wrapper in edit and view files --- src/main/webapp/editFilesFragment.xhtml | 4 ++-- src/main/webapp/file-info-fragment.xhtml | 4 ++-- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/src/main/webapp/editFilesFragment.xhtml b/src/main/webapp/editFilesFragment.xhtml index a4e635b8c14..af06b44e3bc 100644 --- a/src/main/webapp/editFilesFragment.xhtml +++ b/src/main/webapp/editFilesFragment.xhtml @@ -360,13 +360,13 @@
    - - + #{fileMetadata.label} diff --git a/src/main/webapp/file-info-fragment.xhtml b/src/main/webapp/file-info-fragment.xhtml index 33a8d2c3ca5..3e8e80d51e7 100644 --- a/src/main/webapp/file-info-fragment.xhtml +++ b/src/main/webapp/file-info-fragment.xhtml @@ -28,8 +28,8 @@
    - - + From c4ad20bc4b67b93908e60b76a251240f4a6e2540 Mon Sep 17 00:00:00 2001 From: Jim Myers Date: Wed, 17 May 2023 13:49:35 -0400 Subject: [PATCH 0300/1525] add api --- .../edu/harvard/iq/dataverse/api/Admin.java | 22 +++++++++++++++++++ 1 file changed, 22 insertions(+) diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Admin.java b/src/main/java/edu/harvard/iq/dataverse/api/Admin.java index d219339add9..14c556e9caa 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Admin.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Admin.java @@ -2321,4 +2321,26 @@ public Response getSignedUrl(@Context ContainerRequestContext crc, JsonObject ur return ok(Json.createObjectBuilder().add(ExternalToolHandler.SIGNED_URL, signedUrl)); } + @DELETE + @Path("/clearThumbnailFailureFlag") + public Response clearThumbnailFailureFlag() { + em.createNativeQuery("UPDATE dvobject SET previewshavefailed = FALSE").executeUpdate(); + return ok("Thumnail Failure Flags cleared."); + } + + @DELETE + @Path("/clearThumbnailFailureFlag/{id}") + public Response clearThumbnailFailureFlagByDatafile(@PathParam("id") String fileId) { + try { + DataFile df = findDataFileOrDie(fileId); + Query deleteQuery = em.createNativeQuery("UPDATE dvobject SET previewshavefailed = FALSE where id = ?"); + deleteQuery.setParameter(1, df.getId()); + deleteQuery.executeUpdate(); + return ok("Thumnail Failure Flag cleared for file id=: " + df.getId() + "."); + } catch (WrappedResponse r) { + logger.info("Could not find file with the id: " + fileId); + return error(Status.BAD_REQUEST, "Could not find file with the id: " + fileId); + } + } + } From 63e98b3b60a4baae98f1f88a282b97694929c443 Mon Sep 17 00:00:00 2001 From: Jim Myers Date: Wed, 17 May 2023 14:16:47 -0400 Subject: [PATCH 0301/1525] make clearer --- .../java/edu/harvard/iq/dataverse/DataFileServiceBean.java | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/DataFileServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DataFileServiceBean.java index a5822828682..f41565c9449 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DataFileServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/DataFileServiceBean.java @@ -1149,11 +1149,9 @@ public boolean isThumbnailAvailable (DataFile file) { file.setPreviewImageAvailable(true); this.save(file); return true; - } else { - file.setPreviewsHaveFailed(true); - this.save(file); } - + file.setPreviewsHaveFailed(true); + this.save(file); return false; } From 2671cb75effb5425d02b3e874c7525b7833dc533 Mon Sep 17 00:00:00 2001 From: Jim Myers Date: Wed, 17 May 2023 14:25:58 -0400 Subject: [PATCH 0302/1525] update comment --- src/main/java/edu/harvard/iq/dataverse/DvObject.java | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/DvObject.java b/src/main/java/edu/harvard/iq/dataverse/DvObject.java index 6cb3816e3f1..87619450133 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DvObject.java +++ b/src/main/java/edu/harvard/iq/dataverse/DvObject.java @@ -182,8 +182,11 @@ public void setPreviewImageAvailable(boolean status) { this.previewImageAvailable = status; } - /** Indicates whether a previous attempt to generate a preview image has failed, regardless of size. - * If so, we won't want to try again every time the preview/thumbnail is requested for a view. + /** + * Indicates whether a previous attempt to generate a preview image has failed, + * regardless of size. 
This could be due to the file not being accessible, or a + * real failure in generating the thumbnail. In both cases, we won't want to try + * again every time the preview/thumbnail is requested for a view. */ private boolean previewsHaveFailed; From 19db99b1427700c9cc4ad462c0edd017e6dd5799 Mon Sep 17 00:00:00 2001 From: Jim Myers Date: Wed, 17 May 2023 14:26:28 -0400 Subject: [PATCH 0303/1525] remove setting flag where datafile is not clearly being saved to db --- .../harvard/iq/dataverse/dataaccess/ImageThumbConverter.java | 2 -- 1 file changed, 2 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/dataaccess/ImageThumbConverter.java b/src/main/java/edu/harvard/iq/dataverse/dataaccess/ImageThumbConverter.java index ab9294eea72..921faba7989 100644 --- a/src/main/java/edu/harvard/iq/dataverse/dataaccess/ImageThumbConverter.java +++ b/src/main/java/edu/harvard/iq/dataverse/dataaccess/ImageThumbConverter.java @@ -126,7 +126,6 @@ private static boolean isThumbnailAvailable(StorageIO storageIO, int s } if (!thumbnailGenerated) { logger.fine("No thumbnail generated for " + file.getId()); - file.setPreviewsHaveFailed(true); } return thumbnailGenerated; } @@ -459,7 +458,6 @@ public static String getImageThumbnailAsBase64(DataFile file, int size) { if (!generated) { // Record failure logger.fine("Failed to generate base64 thumbnail for file id: " + file.getId()); - file.setPreviewsHaveFailed(true); } else { // Success - try to open again: try { From 156d025970eeb5223b6fd8343db09cafee057fed Mon Sep 17 00:00:00 2001 From: Jim Myers Date: Thu, 1 Jun 2023 15:09:25 -0400 Subject: [PATCH 0304/1525] fix non-merge-able error when recording thumb fail --- .../iq/dataverse/DataFileServiceBean.java | 20 +++++++++---------- 1 file changed, 10 insertions(+), 10 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/DataFileServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DataFileServiceBean.java index f41565c9449..880b2ea7dc4 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DataFileServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/DataFileServiceBean.java @@ -1142,17 +1142,17 @@ public boolean isThumbnailAvailable (DataFile file) { is more important... 
*/ - - if (ImageThumbConverter.isThumbnailAvailable(file)) { - file = this.find(file.getId()); - file.setPreviewImageAvailable(true); - this.save(file); - return true; - } - file.setPreviewsHaveFailed(true); - this.save(file); - return false; + file = this.find(file.getId()); + if (ImageThumbConverter.isThumbnailAvailable(file)) { + file.setPreviewImageAvailable(true); + this.save(file); + return true; + } else { + file.setPreviewsHaveFailed(true); + this.save(file); + return false; + } } From 97aa46cb3e9bd2d424961e68e9d024216740c57f Mon Sep 17 00:00:00 2001 From: Jim Myers Date: Tue, 13 Jun 2023 16:50:38 -0400 Subject: [PATCH 0305/1525] rename script --- ...humb-failures.sql => V5.13.0.2__9506-track-thumb-failures.sql} | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename src/main/resources/db/migration/{V5.13.0.1__9506-track-thumb-failures.sql => V5.13.0.2__9506-track-thumb-failures.sql} (100%) diff --git a/src/main/resources/db/migration/V5.13.0.1__9506-track-thumb-failures.sql b/src/main/resources/db/migration/V5.13.0.2__9506-track-thumb-failures.sql similarity index 100% rename from src/main/resources/db/migration/V5.13.0.1__9506-track-thumb-failures.sql rename to src/main/resources/db/migration/V5.13.0.2__9506-track-thumb-failures.sql From dbc36c9d938571a5b61156611c445d266fbafe76 Mon Sep 17 00:00:00 2001 From: Jim Myers Date: Tue, 13 Jun 2023 17:06:19 -0400 Subject: [PATCH 0306/1525] refactor - remove duplicate code --- .../dataaccess/ImageThumbConverter.java | 29 ++++++------------- 1 file changed, 9 insertions(+), 20 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/dataaccess/ImageThumbConverter.java b/src/main/java/edu/harvard/iq/dataverse/dataaccess/ImageThumbConverter.java index 921faba7989..fb0785ffd7b 100644 --- a/src/main/java/edu/harvard/iq/dataverse/dataaccess/ImageThumbConverter.java +++ b/src/main/java/edu/harvard/iq/dataverse/dataaccess/ImageThumbConverter.java @@ -114,7 +114,11 @@ private static boolean isThumbnailAvailable(StorageIO storageIO, int s logger.fine("Found cached thumbnail for " + file.getId()); return true; } + return generateThumbnail(storageIO, size); + } + + private static boolean generateThumbnail(StorageIO storageIO, int size) { logger.log(Level.FINE, (file.isPreviewsHaveFailed() ? "Not trying" : "Trying") + " to generate thumbnail, file id: " + file.getId()); // Don't try to generate if there have been failures: if (!file.isPreviewsHaveFailed()) { @@ -131,7 +135,6 @@ private static boolean isThumbnailAvailable(StorageIO storageIO, int s } return false; - } // Note that this method works on ALL file types for which thumbnail @@ -446,25 +449,11 @@ public static String getImageThumbnailAsBase64(DataFile file, int size) { logger.fine("Null channel for aux object " + THUMBNAIL_SUFFIX + size); // try to generate, if not available and hasn't failed before - logger.log(Level.FINE, (file.isPreviewsHaveFailed() ? 
"Not trying" : "Trying") + "to generate base64 thumbnail, file id: " + file.getId()); - if (!file.isPreviewsHaveFailed()) { - boolean generated = false; - if (file.getContentType().substring(0, 6).equalsIgnoreCase("image/")) { - generated = generateImageThumbnail(storageIO, size); - } else if (file.getContentType().equalsIgnoreCase("application/pdf")) { - generated = generatePDFThumbnail(storageIO, size); - } - - if (!generated) { - // Record failure - logger.fine("Failed to generate base64 thumbnail for file id: " + file.getId()); - } else { - // Success - try to open again: - try { - cachedThumbnailChannel = storageIO.openAuxChannel(THUMBNAIL_SUFFIX + size); - } catch (Exception ioEx) { - cachedThumbnailChannel = null; - } + if(generateThumbnail(storageIO, size)) { + try { + cachedThumbnailChannel = storageIO.openAuxChannel(THUMBNAIL_SUFFIX + size); + } catch (Exception ioEx) { + cachedThumbnailChannel = null; } } From da4fe474d0607c79030ead51ebb39bb78d8886ae Mon Sep 17 00:00:00 2001 From: Don Sizemore Date: Wed, 14 Jun 2023 11:45:34 -0400 Subject: [PATCH 0307/1525] #9638 Payara downloads moved to Nexus --- doc/sphinx-guides/source/installation/prerequisites.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/sphinx-guides/source/installation/prerequisites.rst b/doc/sphinx-guides/source/installation/prerequisites.rst index 59de507a264..28b94d33269 100644 --- a/doc/sphinx-guides/source/installation/prerequisites.rst +++ b/doc/sphinx-guides/source/installation/prerequisites.rst @@ -55,7 +55,7 @@ Installing Payara - Download and install Payara (installed in ``/usr/local/payara5`` in the example commands below):: - # wget https://s3-eu-west-1.amazonaws.com/payara.fish/Payara+Downloads/5.2022.3/payara-5.2022.3.zip + # wget https://nexus.payara.fish/repository/payara-community/fish/payara/distributions/payara/5.2022.3/payara-5.2022.3.zip # unzip payara-5.2022.3.zip # mv payara5 /usr/local From 3defa0ea39cd4495f24131e410274f283de4302b Mon Sep 17 00:00:00 2001 From: Don Sizemore Date: Wed, 14 Jun 2023 14:23:11 -0400 Subject: [PATCH 0308/1525] #9638 add pointer to repo1.maven.org as alternate download location --- doc/sphinx-guides/source/installation/prerequisites.rst | 2 ++ 1 file changed, 2 insertions(+) diff --git a/doc/sphinx-guides/source/installation/prerequisites.rst b/doc/sphinx-guides/source/installation/prerequisites.rst index 28b94d33269..d95aa78bb26 100644 --- a/doc/sphinx-guides/source/installation/prerequisites.rst +++ b/doc/sphinx-guides/source/installation/prerequisites.rst @@ -59,6 +59,8 @@ Installing Payara # unzip payara-5.2022.3.zip # mv payara5 /usr/local +If nexus.payara.fish is ever down for maintenance, Payara distributions are also available from https://repo1.maven.org/maven2/fish/payara/distributions/payara/ + If you intend to install and run Payara under a service account (and we hope you do), chown -R the Payara hierarchy to root to protect it but give the service account access to the below directories: - Set service account permissions:: From 0b0f8bc0db89319137c986a36771266acfe894f6 Mon Sep 17 00:00:00 2001 From: qqmyers Date: Wed, 14 Jun 2023 15:37:34 -0400 Subject: [PATCH 0309/1525] don't show field when citationDate is empty true when not using the publication date in the citation --- src/main/webapp/metadataFragment.xhtml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/main/webapp/metadataFragment.xhtml b/src/main/webapp/metadataFragment.xhtml index 15bec0d9c32..200d2917b9a 100755 --- 
a/src/main/webapp/metadataFragment.xhtml
+++ b/src/main/webapp/metadataFragment.xhtml
@@ -57,7 +57,7 @@
 #{publicationDate} - + #{bundle['dataset.metadata.citationDate']}
Date: Wed, 14 Jun 2023 17:21:43 -0400
Subject: [PATCH 0310/1525] Assume if it's a person and a comma we can get given/family names even if the givenName isn't in the list of known ones

---
 .../edu/harvard/iq/dataverse/util/PersonOrOrgUtil.java | 9 +++++++++
 1 file changed, 9 insertions(+)

diff --git a/src/main/java/edu/harvard/iq/dataverse/util/PersonOrOrgUtil.java b/src/main/java/edu/harvard/iq/dataverse/util/PersonOrOrgUtil.java
index 60f5c91efc4..431013771c5 100644
--- a/src/main/java/edu/harvard/iq/dataverse/util/PersonOrOrgUtil.java
+++ b/src/main/java/edu/harvard/iq/dataverse/util/PersonOrOrgUtil.java
@@ -118,6 +118,15 @@ public static JsonObject getPersonOrOrganization(String name, boolean organizati
            }
        }
    }
+   if(!isOrganization && givenName == null && name.contains(",")) {
+       //If we still think this is a person and there's only one comma, assume we can extract the given name and family name
+       if (!name.replaceFirst(",", "").contains(",")) {
+           // contributorName=,
+           String[] fullName = name.split(", ");
+           givenName = fullName[1];
+           familyName = fullName[0];
+       }
+   }
    JsonObjectBuilder job = new NullSafeJsonBuilder();
    job.add("fullName", name);
    job.add("givenName", givenName);

From c3a1cad90daad36cac438e02a450d32777a0131f Mon Sep 17 00:00:00 2001
From: Philip Durbin
Date: Thu, 15 Jun 2023 10:57:49 -0400
Subject: [PATCH 0311/1525] add docs and release note #9588

---
 doc/sphinx-guides/source/api/native-api.rst | 44 +++++++++++++++++++
 .../source/installation/config.rst | 6 +++
 2 files changed, 50 insertions(+)

diff --git a/doc/sphinx-guides/source/api/native-api.rst b/doc/sphinx-guides/source/api/native-api.rst
index 369e92ba129..b39cf91337a 100644
--- a/doc/sphinx-guides/source/api/native-api.rst
+++ b/doc/sphinx-guides/source/api/native-api.rst
@@ -2173,6 +2173,50 @@ Signposting is not supported for draft dataset versions.

   curl -H "Accept:application/json" "$SERVER_URL/api/datasets/:persistentId/versions/$VERSION/linkset?persistentId=$PERSISTENT_IDENTIFIER"

+Get Dataset By Private URL Token
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+.. code-block:: bash
+
+    export SERVER_URL=https://demo.dataverse.org
+    export PRIVATE_URL_TOKEN=a56444bc-7697-4711-8964-e0577f055fd2
+
+    curl "$SERVER_URL/api/datasets/privateUrlDatasetVersion/$PRIVATE_URL_TOKEN"
+
+Get Citation
+~~~~~~~~~~~~
+
+.. code-block:: bash
+
+    export SERVER_URL=https://demo.dataverse.org
+    export PERSISTENT_IDENTIFIER=doi:10.5072/FK2/YD5QDG
+    export VERSION=1.0
+
+    curl -H "Accept:application/json" "$SERVER_URL/api/datasets/:persistentId/versions/$VERSION/citation?persistentId=$PERSISTENT_IDENTIFIER"
+
+Get Citation by Private URL Token
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+.. code-block:: bash
+
+    export SERVER_URL=https://demo.dataverse.org
+    export PRIVATE_URL_TOKEN=a56444bc-7697-4711-8964-e0577f055fd2
+
+    curl "$SERVER_URL/api/datasets/privateUrlDatasetVersion/$PRIVATE_URL_TOKEN/citation"
+
+.. _get-dataset-summary-field-names:
+
+Get Summary Field Names
+~~~~~~~~~~~~~~~~~~~~~~~
+
+See :ref:`:CustomDatasetSummaryFields` in the Installation Guide for how the list of dataset fields that summarize a dataset can be customized. Here's how to list them:
+
+..
code-block:: bash + + export SERVER_URL=https://demo.dataverse.org + + curl "$SERVER_URL/api/datasets/summaryFieldNames" + Files ----- diff --git a/doc/sphinx-guides/source/installation/config.rst b/doc/sphinx-guides/source/installation/config.rst index c0eb576d7f5..2abdbbc535b 100644 --- a/doc/sphinx-guides/source/installation/config.rst +++ b/doc/sphinx-guides/source/installation/config.rst @@ -3406,6 +3406,8 @@ Limit on how many guestbook entries to display on the guestbook-responses page. ``curl -X PUT -d 10000 http://localhost:8080/api/admin/settings/:GuestbookResponsesPageDisplayLimit`` +.. _:CustomDatasetSummaryFields: + :CustomDatasetSummaryFields +++++++++++++++++++++++++++ @@ -3415,6 +3417,10 @@ You can replace the default dataset metadata fields that are displayed above fil You have to put the datasetFieldType name attribute in the :CustomDatasetSummaryFields setting for this to work. +The default fields are ``dsDescription,subject,keyword,publication,notesText``. + +This setting can be retrieved via API. See :ref:`get-dataset-summary-field-names` in the API Guide. + :AllowApiTokenLookupViaApi ++++++++++++++++++++++++++ From e59043fec49859e88eaf54f45c7f195c8bdbd7be Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Thu, 15 Jun 2023 10:59:36 -0400 Subject: [PATCH 0312/1525] add release note #9588 --- doc/release-notes/9588-datasets-api-extension.md | 6 ++++++ 1 file changed, 6 insertions(+) create mode 100644 doc/release-notes/9588-datasets-api-extension.md diff --git a/doc/release-notes/9588-datasets-api-extension.md b/doc/release-notes/9588-datasets-api-extension.md new file mode 100644 index 00000000000..f4fd6354d47 --- /dev/null +++ b/doc/release-notes/9588-datasets-api-extension.md @@ -0,0 +1,6 @@ +The following APIs have been added: + +- /api/datasets/summaryFieldNames +- /api/datasets/privateUrlDatasetVersion/{privateUrlToken} +- /api/datasets/privateUrlDatasetVersion/{privateUrlToken}/citation +- /api/datasets/{datasetId}/versions/{version}/citation From 54d7ea31a21028cad71201a6bbc1b96797939380 Mon Sep 17 00:00:00 2001 From: Jim Myers Date: Thu, 15 Jun 2023 12:27:22 -0400 Subject: [PATCH 0313/1525] fix test --- .../java/edu/harvard/iq/dataverse/util/PersonOrOrgUtilTest.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/test/java/edu/harvard/iq/dataverse/util/PersonOrOrgUtilTest.java b/src/test/java/edu/harvard/iq/dataverse/util/PersonOrOrgUtilTest.java index 0e2fc1596c9..0567d1e2b46 100644 --- a/src/test/java/edu/harvard/iq/dataverse/util/PersonOrOrgUtilTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/util/PersonOrOrgUtilTest.java @@ -86,7 +86,7 @@ public void testName() { // test only family name verifyIsPerson("Cadili", null, null); - verifyIsPerson("kcjim11, kcjim11", null, null); + verifyIsPerson("kcjim11, kcjim11", "kcjim11", "kcjim11"); verifyIsPerson("Bartholomew 3, James", "James", "Bartholomew 3"); } From 30bd77a7180650abee618aec9cf62392db616786 Mon Sep 17 00:00:00 2001 From: Jim Myers Date: Thu, 15 Jun 2023 12:55:40 -0400 Subject: [PATCH 0314/1525] update test fake first/last names are now recognized --- .../harvard/iq/dataverse/export/OpenAireExportUtilTest.java | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/src/test/java/edu/harvard/iq/dataverse/export/OpenAireExportUtilTest.java b/src/test/java/edu/harvard/iq/dataverse/export/OpenAireExportUtilTest.java index 29486339785..f61c832ac6c 100644 --- a/src/test/java/edu/harvard/iq/dataverse/export/OpenAireExportUtilTest.java +++ 
b/src/test/java/edu/harvard/iq/dataverse/export/OpenAireExportUtilTest.java @@ -461,22 +461,28 @@ public void testWriteContributorsElementComplete() throws XMLStreamException, IO + "" + "" + "LastProducer1, FirstProducer1" + + "FirstProducer1LastProducer1" + "ProducerAffiliation1" + "" + "LastProducer2, FirstProducer2" + + "FirstProducer2LastProducer2" + "ProducerAffiliation2" + "" + "" + "LastContributor1, FirstContributor1" + + "FirstContributor1LastContributor1" + "" + "" + "LastContributor2, FirstContributor2" + + "FirstContributor2LastContributor2" + "" + "LastDistributor1, FirstDistributor1" + + "FirstDistributor1LastDistributor1" + "DistributorAffiliation1" + "" + "" + "LastDistributor2, FirstDistributor2" + + "FirstDistributor2LastDistributor2" + "DistributorAffiliation2" + "" + "", From 683c8c4a1d027761b399f42021319955f5294030 Mon Sep 17 00:00:00 2001 From: Jim Myers Date: Thu, 15 Jun 2023 13:04:59 -0400 Subject: [PATCH 0315/1525] further fix --- .../iq/dataverse/export/OpenAireExportUtilTest.java | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/src/test/java/edu/harvard/iq/dataverse/export/OpenAireExportUtilTest.java b/src/test/java/edu/harvard/iq/dataverse/export/OpenAireExportUtilTest.java index f61c832ac6c..76ca853d5cc 100644 --- a/src/test/java/edu/harvard/iq/dataverse/export/OpenAireExportUtilTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/export/OpenAireExportUtilTest.java @@ -460,28 +460,28 @@ public void testWriteContributorsElementComplete() throws XMLStreamException, IO + "ContactAffiliation3" + "" + "" - + "LastProducer1, FirstProducer1" + + "LastProducer1, FirstProducer1" + "FirstProducer1LastProducer1" + "ProducerAffiliation1" + "" - + "LastProducer2, FirstProducer2" + + "LastProducer2, FirstProducer2" + "FirstProducer2LastProducer2" + "ProducerAffiliation2" + "" + "" - + "LastContributor1, FirstContributor1" + + "LastContributor1, FirstContributor1" + "FirstContributor1LastContributor1" + "" + "" - + "LastContributor2, FirstContributor2" + + "LastContributor2, FirstContributor2" + "FirstContributor2LastContributor2" + "" - + "LastDistributor1, FirstDistributor1" + + "LastDistributor1, FirstDistributor1" + "FirstDistributor1LastDistributor1" + "DistributorAffiliation1" + "" + "" - + "LastDistributor2, FirstDistributor2" + + "LastDistributor2, FirstDistributor2" + "FirstDistributor2LastDistributor2" + "DistributorAffiliation2" + "" From aada6909b0f594e3b65ad273a92fc45079bfcb3f Mon Sep 17 00:00:00 2001 From: Jim Myers Date: Fri, 5 May 2023 15:06:35 -0400 Subject: [PATCH 0316/1525] fix search a11y --- src/main/webapp/search-include-fragment.xhtml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/main/webapp/search-include-fragment.xhtml b/src/main/webapp/search-include-fragment.xhtml index 217ea539533..d3a84f491ae 100644 --- a/src/main/webapp/search-include-fragment.xhtml +++ b/src/main/webapp/search-include-fragment.xhtml @@ -28,7 +28,7 @@ - From 3d0b6bfd434cf74079d1ca17c2a587fce103242d Mon Sep 17 00:00:00 2001 From: Jim Myers Date: Fri, 5 May 2023 17:14:56 -0400 Subject: [PATCH 0317/1525] checkbox aria-labels for a11y using passthrough attributes --- src/main/java/propertyFiles/Bundle.properties | 6 +++--- src/main/webapp/search-include-fragment.xhtml | 3 +++ 2 files changed, 6 insertions(+), 3 deletions(-) diff --git a/src/main/java/propertyFiles/Bundle.properties b/src/main/java/propertyFiles/Bundle.properties index c16a1f23bd1..446730c3ab4 100644 --- a/src/main/java/propertyFiles/Bundle.properties +++ 
b/src/main/java/propertyFiles/Bundle.properties @@ -1062,9 +1062,9 @@ dataverse.widgets.advanced.url.watermark=http://www.example.com/page-name dataverse.widgets.advanced.invalid.message=Please enter a valid URL dataverse.widgets.advanced.success.message=Successfully updated your Personal Website URL dataverse.widgets.advanced.failure.message=The dataverse Personal Website URL has not been updated. -facet.collection.label=Show Collections -facet.dataset.label=Show Data Projects -facet.datafile.label=Show Files +facet.collection.label=Toggle Collections +facet.dataset.label=Toggle Data Projects +facet.datafile.label=Toggle Files # permissions-manage.xhtml dataverse.permissions.title=Permissions diff --git a/src/main/webapp/search-include-fragment.xhtml b/src/main/webapp/search-include-fragment.xhtml index d3a84f491ae..718df813348 100644 --- a/src/main/webapp/search-include-fragment.xhtml +++ b/src/main/webapp/search-include-fragment.xhtml @@ -105,6 +105,7 @@ + @@ -136,6 +137,7 @@ + @@ -167,6 +169,7 @@ + From cd2660f4c8f38eb60e3d2c031fad3268b0196271 Mon Sep 17 00:00:00 2001 From: Jim Myers Date: Tue, 9 May 2023 10:50:29 -0400 Subject: [PATCH 0318/1525] Dataset page accessibility fixes --- src/main/webapp/dataset-license-terms.xhtml | 1 + src/main/webapp/file-edit-popup-fragment.xhtml | 1 + src/main/webapp/filesFragment.xhtml | 2 +- 3 files changed, 3 insertions(+), 1 deletion(-) diff --git a/src/main/webapp/dataset-license-terms.xhtml b/src/main/webapp/dataset-license-terms.xhtml index 86e52092622..c5958697a20 100644 --- a/src/main/webapp/dataset-license-terms.xhtml +++ b/src/main/webapp/dataset-license-terms.xhtml @@ -52,6 +52,7 @@ + diff --git a/src/main/webapp/file-edit-popup-fragment.xhtml b/src/main/webapp/file-edit-popup-fragment.xhtml index 8f8de725bdc..ffc4a1fcef7 100644 --- a/src/main/webapp/file-edit-popup-fragment.xhtml +++ b/src/main/webapp/file-edit-popup-fragment.xhtml @@ -76,6 +76,7 @@ +
    diff --git a/src/main/webapp/filesFragment.xhtml b/src/main/webapp/filesFragment.xhtml index 6122b86b274..3cc26779c1a 100644 --- a/src/main/webapp/filesFragment.xhtml +++ b/src/main/webapp/filesFragment.xhtml @@ -108,7 +108,7 @@ - + From 824ac8f7a09cea1ad0e68700b5a2cd31ef118ef7 Mon Sep 17 00:00:00 2001 From: Jim Myers Date: Mon, 17 Apr 2023 15:50:10 -0400 Subject: [PATCH 0319/1525] another license a11y in the terms tab --- src/main/webapp/resources/css/structure.css | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/src/main/webapp/resources/css/structure.css b/src/main/webapp/resources/css/structure.css index 2435663de50..f2eaad4b2c3 100644 --- a/src/main/webapp/resources/css/structure.css +++ b/src/main/webapp/resources/css/structure.css @@ -1128,6 +1128,10 @@ padding-right:0px; a { color:#3174AF; } + +.label-warning a, .bg-citation a, .terms .help-block a, #panelCollapseTOU .help-block a { + text-decoration: underline; +} .pagination>.disabled>a { color:#767676; } From a24bcedef5ab6d2c0ddd57027b62426625e169a6 Mon Sep 17 00:00:00 2001 From: Jim Myers Date: Fri, 5 May 2023 18:42:12 -0400 Subject: [PATCH 0320/1525] remove continues --- .../edu/harvard/iq/dataverse/search/IndexServiceBean.java | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/search/IndexServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/search/IndexServiceBean.java index 2d0bf8f467c..b1659e1858b 100644 --- a/src/main/java/edu/harvard/iq/dataverse/search/IndexServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/search/IndexServiceBean.java @@ -1160,12 +1160,15 @@ public SolrInputDocuments toSolrDocs(IndexableDataset indexableDataset, Set Date: Fri, 16 Jun 2023 10:08:04 +0200 Subject: [PATCH 0321/1525] Remove dublicated information Subchapter "Create a New Role in a Dataverse Collection" with the same content in chapter "Dataverse Collection" --- doc/sphinx-guides/source/api/native-api.rst | 33 --------------------- 1 file changed, 33 deletions(-) diff --git a/doc/sphinx-guides/source/api/native-api.rst b/doc/sphinx-guides/source/api/native-api.rst index 369e92ba129..70bca7e65a7 100644 --- a/doc/sphinx-guides/source/api/native-api.rst +++ b/doc/sphinx-guides/source/api/native-api.rst @@ -3014,39 +3014,6 @@ Optionally, you may use a third query parameter "sendEmailNotification=false" to Roles ----- -Create a New Role in a Dataverse Collection -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - -Creates a new role under Dataverse collection ``id``. Needs a json file with the role description: - -.. code-block:: bash - - export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx - export SERVER_URL=https://demo.dataverse.org - export ID=root - - curl -H X-Dataverse-key:$API_TOKEN -X POST -H "Content-type:application/json" $SERVER_URL/api/dataverses/$ID/roles --upload-file roles.json - -The fully expanded example above (without environment variables) looks like this: - -.. code-block:: bash - - curl -H X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx -X POST -H "Content-type:application/json" https://demo.dataverse.org/api/dataverses/root/roles --upload-file roles.json - -Where ``roles.json`` looks like this:: - - { - "alias": "sys1", - "name": “Restricted System Roleâ€, - "description": “A person who may only add datasets.â€, - "permissions": [ - "AddDataset" - ] - } - -.. note:: Only a Dataverse installation account with superuser permissions is allowed to create roles in a Dataverse Collection. 
- - Show Role
~~~~~~~~~

From a841ce66ec390130fba45340fb6e8f8bd666e3fd Mon Sep 17 00:00:00 2001
From: Leonhard Maylein
Date: Fri, 16 Jun 2023 11:52:16 +0200
Subject: [PATCH 0322/1525] Example for roles.json in chapter "Roles"

---
 doc/sphinx-guides/source/api/native-api.rst | 14 ++++++++++++++
 1 file changed, 14 insertions(+)

diff --git a/doc/sphinx-guides/source/api/native-api.rst b/doc/sphinx-guides/source/api/native-api.rst
index 70bca7e65a7..0844c3a38ea 100644
--- a/doc/sphinx-guides/source/api/native-api.rst
+++ b/doc/sphinx-guides/source/api/native-api.rst
@@ -3014,6 +3014,20 @@ Optionally, you may use a third query parameter "sendEmailNotification=false" to
 Roles
 -----

+JSON Representation of a Role
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+The JSON representation of a role (``roles.json``) looks like this::
+
+    {
+      "alias": "sys1",
+      "name": "Restricted System Role",
+      "description": "A person who may only add datasets.",
+      "permissions": [
+        "AddDataset"
+      ]
+    }
+
 Show Role
 ~~~~~~~~~

From 6e2006aa68a2857df0eb7da0a4b7b1ef0c6b3f46 Mon Sep 17 00:00:00 2001
From: Leonhard Maylein
Date: Fri, 16 Jun 2023 12:42:31 +0200
Subject: [PATCH 0323/1525] Link to the JSON representation of a role, add example

---
 doc/sphinx-guides/source/api/native-api.rst | 21 +++++++++++----------
 1 file changed, 11 insertions(+), 10 deletions(-)

diff --git a/doc/sphinx-guides/source/api/native-api.rst b/doc/sphinx-guides/source/api/native-api.rst
index 0844c3a38ea..5255e3e5d14 100644
--- a/doc/sphinx-guides/source/api/native-api.rst
+++ b/doc/sphinx-guides/source/api/native-api.rst
@@ -315,16 +315,7 @@ The fully expanded example above (without environment variables) looks like this

   curl -H X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx -X POST -H "Content-type:application/json" https://demo.dataverse.org/api/dataverses/root/roles --upload-file roles.json

-Where ``roles.json`` looks like this::
-
-    {
-      "alias": "sys1",
-      "name": "Restricted System Role",
-      "description": "A person who may only add datasets.",
-      "permissions": [
-        "AddDataset"
-      ]
-    }
+``roles.json`` see :ref:`json-representation-of-a-role`

 .. note:: Only a Dataverse installation account with superuser permissions is allowed to create roles in a Dataverse Collection.

@@ -3893,6 +3884,16 @@ Create Global Role
 ~~~~~~~~~~~~~~~~~~

 Creates a global role in the Dataverse installation. The data POSTed are assumed to be a role JSON. ::

   POST http://$SERVER/api/admin/roles
+
+.. code-block:: bash
+
+    export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx
+    export SERVER_URL=https://demo.dataverse.org
+    export ID=root
+
+    curl -H X-Dataverse-key:$API_TOKEN -X POST $SERVER_URL/api/admin/roles --upload-file roles.json
+
+``roles.json`` see :ref:`json-representation-of-a-role`

 Delete Global Role
 ~~~~~~~~~~~~~~~~~~

From 48f89bc357064068bf27a2cdf467f3a8595f0172 Mon Sep 17 00:00:00 2001
From: Leonhard Maylein
Date: Fri, 16 Jun 2023 12:50:10 +0200
Subject: [PATCH 0324/1525] Add information about length constraint of role alias

---
 doc/sphinx-guides/source/api/native-api.rst | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/doc/sphinx-guides/source/api/native-api.rst b/doc/sphinx-guides/source/api/native-api.rst
index 5255e3e5d14..52bfc3e5ac2 100644
--- a/doc/sphinx-guides/source/api/native-api.rst
+++ b/doc/sphinx-guides/source/api/native-api.rst
@@ -3019,6 +3019,8 @@ The JSON representation of a role (``roles.json``) looks like this::
     ]
   }

+..
note:: alias is constrained to a length of 16 characters + Show Role ~~~~~~~~~ From ed862b74edc6697a9c606ee7efd664bd8b8e15a7 Mon Sep 17 00:00:00 2001 From: Leonhard Maylein Date: Fri, 16 Jun 2023 13:09:10 +0200 Subject: [PATCH 0325/1525] define label _json-representation-of-a-role --- doc/sphinx-guides/source/api/native-api.rst | 2 ++ 1 file changed, 2 insertions(+) diff --git a/doc/sphinx-guides/source/api/native-api.rst b/doc/sphinx-guides/source/api/native-api.rst index 52bfc3e5ac2..44140656c68 100644 --- a/doc/sphinx-guides/source/api/native-api.rst +++ b/doc/sphinx-guides/source/api/native-api.rst @@ -3005,6 +3005,8 @@ Optionally, you may use a third query parameter "sendEmailNotification=false" to Roles ----- +.. _json-representation-of-a-role: + JSON Representation of a Role ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ From 60d34e2f5718a746a2540af4ef897f9ea4470748 Mon Sep 17 00:00:00 2001 From: Eryk Kulikowski Date: Fri, 16 Jun 2023 16:17:41 +0200 Subject: [PATCH 0326/1525] restored missing side effect --- .../java/edu/harvard/iq/dataverse/DatasetPage.java | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java b/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java index cc270cb62d5..98169a831f4 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java @@ -684,12 +684,14 @@ public void showAll(){ private List selectFileMetadatasForDisplay() { final Set searchResultsIdSet; - if (StringUtil.isEmpty(fileLabelSearchTerm) && StringUtil.isEmpty(fileTypeFacet) && StringUtil.isEmpty(fileAccessFacet) && StringUtil.isEmpty(fileTagsFacet)) { - // But, if no search terms were specified, we return the full - // list of the files in the version: + if (isIndexedVersion() && StringUtil.isEmpty(fileLabelSearchTerm) && StringUtil.isEmpty(fileTypeFacet) && StringUtil.isEmpty(fileAccessFacet) && StringUtil.isEmpty(fileTagsFacet)) { + // Indexed version: we need facets, they are set as a side effect of getFileIdsInVersionFromSolr method. + // But, no search terms were specified, we will return the full + // list of the files in the version: we discard the getFileIdsInVersionFromSolr. 
+ getFileIdsInVersionFromSolr(workingVersion.getId(), this.fileLabelSearchTerm); // Since the search results should include the full set of fmds if all the // terms/facets are empty, setting them to null should just be - // an optimization for the loop below + // an optimization to skip the loop below searchResultsIdSet = null; } else if (isIndexedVersion()) { // We run the search even if no search term and/or facets are From 1997a44f7ce5411507148ae20627cc747f3ec686 Mon Sep 17 00:00:00 2001 From: Eryk Kulikowski Date: Fri, 16 Jun 2023 16:20:42 +0200 Subject: [PATCH 0327/1525] improved comment --- src/main/java/edu/harvard/iq/dataverse/DatasetPage.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java b/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java index 98169a831f4..3d608153ba3 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java @@ -687,7 +687,7 @@ private List selectFileMetadatasForDisplay() { if (isIndexedVersion() && StringUtil.isEmpty(fileLabelSearchTerm) && StringUtil.isEmpty(fileTypeFacet) && StringUtil.isEmpty(fileAccessFacet) && StringUtil.isEmpty(fileTagsFacet)) { // Indexed version: we need facets, they are set as a side effect of getFileIdsInVersionFromSolr method. // But, no search terms were specified, we will return the full - // list of the files in the version: we discard the getFileIdsInVersionFromSolr. + // list of the files in the version: we discard the result from getFileIdsInVersionFromSolr. getFileIdsInVersionFromSolr(workingVersion.getId(), this.fileLabelSearchTerm); // Since the search results should include the full set of fmds if all the // terms/facets are empty, setting them to null should just be From 3413a4c57ee2b19b66213e1a69a37e5eaa32a80b Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Fri, 16 Jun 2023 11:05:00 -0400 Subject: [PATCH 0328/1525] explain that setting can be found via API #9431 --- doc/sphinx-guides/source/api/native-api.rst | 2 ++ doc/sphinx-guides/source/installation/config.rst | 2 ++ 2 files changed, 4 insertions(+) diff --git a/doc/sphinx-guides/source/api/native-api.rst b/doc/sphinx-guides/source/api/native-api.rst index cfd8eca5c3b..0d978f78805 100644 --- a/doc/sphinx-guides/source/api/native-api.rst +++ b/doc/sphinx-guides/source/api/native-api.rst @@ -2963,6 +2963,8 @@ The response is a JSON object described in the :doc:`/api/external-tools` sectio curl -H "X-Dataverse-key: $API_TOKEN" -H "Accept:application/json" "$SERVER_URL/api/files/$FILE_ID/metadata/$FILEMETADATA_ID/toolparams/$TOOL_ID +.. _get-fixity-algorithm: + Get Fixity Algorithm ~~~~~~~~~~~~~~~~~~~~~~ diff --git a/doc/sphinx-guides/source/installation/config.rst b/doc/sphinx-guides/source/installation/config.rst index fbeb112e033..ba709cb2940 100644 --- a/doc/sphinx-guides/source/installation/config.rst +++ b/doc/sphinx-guides/source/installation/config.rst @@ -3125,6 +3125,8 @@ The default checksum algorithm used is MD5 and should be sufficient for establis To update the algorithm used for existing files, see :ref:`UpdateChecksums` +The fixity checksum algorithm in use can be discovered via API. See :ref:`get-fixity-algorithm` in the API Guide. + .. 
_:PVMinLength:

:PVMinLength

From 4f782d623eeeec1352a359e5d27d429bb0d50e27 Mon Sep 17 00:00:00 2001
From: Eryk Kulikowski
Date: Fri, 16 Jun 2023 18:21:36 +0200
Subject: [PATCH 0329/1525] crawlers should not follow the filter links

---
 src/main/webapp/filesFragment.xhtml | 24 ++++++++++++------------
 1 file changed, 12 insertions(+), 12 deletions(-)

diff --git a/src/main/webapp/filesFragment.xhtml b/src/main/webapp/filesFragment.xhtml
index 6122b86b274..99ab30fed68 100644
--- a/src/main/webapp/filesFragment.xhtml
+++ b/src/main/webapp/filesFragment.xhtml
@@ -144,7 +144,7 @@
[The markup of these diff hunks did not survive extraction; only bare "+" markers remain. Per the commit subject above, the hunks rework the facet filter links in the file table so that crawlers do not follow them, most likely by adding a rel="nofollow" attribute to each link.]
    Date: Thu, 22 Jun 2023 10:34:11 -0400 Subject: [PATCH 0378/1525] change render param - not clear this fragment stays here though --- src/main/webapp/file-download-popup-fragment.xhtml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/main/webapp/file-download-popup-fragment.xhtml b/src/main/webapp/file-download-popup-fragment.xhtml index e1020c85e69..6fe3863b85f 100644 --- a/src/main/webapp/file-download-popup-fragment.xhtml +++ b/src/main/webapp/file-download-popup-fragment.xhtml @@ -138,7 +138,7 @@ value="#{MarkupChecker:sanitizeBasicHTML(workingVersion.termsOfUseAndAccess.termsOfAccess)}" escape="false" />
    - + #{fileMetadata.dataFile.friendlyType == 'Unknown' ? bundle['file.download.filetype.unknown'] : fileMetadata.dataFile.friendlyType} @@ -167,7 +168,7 @@ action="#{guestbookResponseService.modifyDatafileAndFormat(guestbookResponse, fileMetadata, 'original' )}" update="@widgetVar(guestbookAndTermsPopup)" oncomplete="PF('guestbookAndTermsPopup').show();handleResizeDialog('guestbookAndTermsPopup');"> - + @@ -186,7 +187,7 @@ disabled="#{(fileMetadata.dataFile.ingestInProgress or lockedFromDownload) ? 'disabled' : ''}" update="@widgetVar(guestbookAndTermsPopup)" oncomplete="PF('guestbookAndTermsPopup').show();handleResizeDialog('guestbookAndTermsPopup');"> - + #{bundle['file.downloadBtn.format.tab']} @@ -205,7 +206,7 @@ action="#{guestbookResponseService.modifyDatafileAndFormat(guestbookResponse, fileMetadata, 'RData' )}" update="@widgetVar(guestbookAndTermsPopup)" oncomplete="PF('guestbookAndTermsPopup').show();handleResizeDialog('guestbookAndTermsPopup');"> - + #{bundle['file.downloadBtn.format.rdata']} From 58bc6c9a264ca7ef5caf44ad3fbc3a04a38f67fb Mon Sep 17 00:00:00 2001 From: qqmyers Date: Thu, 22 Jun 2023 15:40:48 -0400 Subject: [PATCH 0381/1525] missing actionListeners --- src/main/webapp/filesFragment.xhtml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/src/main/webapp/filesFragment.xhtml b/src/main/webapp/filesFragment.xhtml index 4c1a943b86e..7e1cb4ac4cd 100644 --- a/src/main/webapp/filesFragment.xhtml +++ b/src/main/webapp/filesFragment.xhtml @@ -444,6 +444,7 @@ onclick="if (!testFilesSelected()) return false;" action="#{DatasetPage.startDownloadSelectedOriginal()}" update="@form" oncomplete="showPopup();"> + #{bundle.download} @@ -462,6 +463,7 @@ update="@form" oncomplete="showPopup();" onclick="if (!testFilesSelected()) return false;" actionListener="#{DatasetPage.startDownloadSelectedOriginal()}"> + #{bundle.downloadOriginal} From 60c4db0d593b455a02d6df2ebe7801b8c9dbca5e Mon Sep 17 00:00:00 2001 From: qqmyers Date: Thu, 22 Jun 2023 15:42:24 -0400 Subject: [PATCH 0382/1525] add missing params, change fileDownloadHelper to EJB --- src/main/java/edu/harvard/iq/dataverse/DatasetPage.java | 2 +- src/main/webapp/dataset.xhtml | 2 ++ 2 files changed, 3 insertions(+), 1 deletion(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java b/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java index f01fe00937e..40b0ba1a010 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java @@ -244,7 +244,7 @@ public enum DisplayMode { DatasetVersionUI datasetVersionUI; @Inject PermissionsWrapper permissionsWrapper; - @Inject + @EJB FileDownloadHelper fileDownloadHelper; @Inject ThumbnailServiceWrapper thumbnailServiceWrapper; diff --git a/src/main/webapp/dataset.xhtml b/src/main/webapp/dataset.xhtml index c39042a91ce..1cc8213ecd5 100644 --- a/src/main/webapp/dataset.xhtml +++ b/src/main/webapp/dataset.xhtml @@ -1511,9 +1511,11 @@ + + From 1e8495cff14955a12f7829a55fb7b2a434c537e6 Mon Sep 17 00:00:00 2001 From: qqmyers Date: Thu, 22 Jun 2023 16:07:04 -0400 Subject: [PATCH 0383/1525] use Inject and restore getter/setter --- .../java/edu/harvard/iq/dataverse/DatasetPage.java | 10 +++++++++- 1 file changed, 9 insertions(+), 1 deletion(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java b/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java index 40b0ba1a010..e890752c19c 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java +++ 
b/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java @@ -244,7 +244,7 @@ public enum DisplayMode { DatasetVersionUI datasetVersionUI; @Inject PermissionsWrapper permissionsWrapper; - @EJB + @Inject FileDownloadHelper fileDownloadHelper; @Inject ThumbnailServiceWrapper thumbnailServiceWrapper; @@ -5476,6 +5476,14 @@ public FileDownloadServiceBean getFileDownloadService() { public void setFileDownloadService(FileDownloadServiceBean fileDownloadService) { this.fileDownloadService = fileDownloadService; } + + public FileDownloadHelper getFileDownloadHelper() { + return fileDownloadHelper; + } + + public void setFileDownloadHelper(FileDownloadHelper fileDownloadHelper) { + this.fileDownloadHelper = fileDownloadHelper; + } public GuestbookResponseServiceBean getGuestbookResponseService() { From a8cfdd7a9572f7bc6342d0c9f0614d764b83c9d8 Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Fri, 23 Jun 2023 09:06:00 -0400 Subject: [PATCH 0384/1525] update windows dev page #9540 --- .../source/developers/dev-environment.rst | 2 +- .../source/developers/windows.rst | 83 ++----------------- 2 files changed, 8 insertions(+), 77 deletions(-) diff --git a/doc/sphinx-guides/source/developers/dev-environment.rst b/doc/sphinx-guides/source/developers/dev-environment.rst index 58b25d8b941..14087650eb4 100755 --- a/doc/sphinx-guides/source/developers/dev-environment.rst +++ b/doc/sphinx-guides/source/developers/dev-environment.rst @@ -22,7 +22,7 @@ Supported Operating Systems Mac OS X or Linux is required because the setup scripts assume the presence of standard Unix utilities. -Windows is not well supported, unfortunately, but Vagrant and Minishift environments are described in the :doc:`windows` section. +Windows is gaining support through Docker as described in the :doc:`windows` section. Install Java ~~~~~~~~~~~~ diff --git a/doc/sphinx-guides/source/developers/windows.rst b/doc/sphinx-guides/source/developers/windows.rst index e278b193e12..53578fe980c 100755 --- a/doc/sphinx-guides/source/developers/windows.rst +++ b/doc/sphinx-guides/source/developers/windows.rst @@ -2,86 +2,17 @@ Windows Development =================== -Development on Windows is not well supported, unfortunately. You will have a much easier time if you develop on Mac or Linux as described under :doc:`dev-environment` section. - -Vagrant commands appear below and were tested on Windows 10 but the Vagrant environment is currently broken. Please see https://github.com/IQSS/dataverse/issues/6849 - -There is a newer, experimental Docker option described under :doc:`/container/dev-usage` in the Container Guide. +Historically, development on Windows is `not well supported `_ but as of 2023 a container-based approach is recommended. .. contents:: |toctitle| :local: -Running the Dataverse Software in Vagrant ------------------------------------------ - -Install Vagrant -~~~~~~~~~~~~~~~ - -Download and install Vagrant from https://www.vagrantup.com - -Vagrant advises you to reboot but let's install VirtualBox first. - -Install VirtualBox -~~~~~~~~~~~~~~~~~~ - -Download and install VirtualBox from https://www.virtualbox.org - -Note that we saw an error saying "Oracle VM VirtualBox 5.2.8 Setup Wizard ended prematurely" but then we re-ran the installer and it seemed to work. - -Reboot -~~~~~~ - -Again, Vagrant asks you to reboot, so go ahead. 
- -Install Git
-~~~~~~~~~~~
-
-Download and install Git from https://git-scm.com
-
-Configure Git to use Unix Line Endings
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-Launch Git Bash and run the following commands:
-
-``git config --global core.autocrlf input``
-
-Pro tip: Use Shift-Insert to paste into Git Bash.
-
-See also https://help.github.com/articles/dealing-with-line-endings/
-
-If you skip this step you are likely to see the following error when you run ``vagrant up``.
-
-``/tmp/vagrant-shell: ./install: /usr/bin/perl^M: bad interpreter: No such file or directory``
-
-Clone Git Repo
-~~~~~~~~~~~~~~
-
-From Git Bash, run the following command:
-
-``git clone https://github.com/IQSS/dataverse.git``
-
-vagrant up
-~~~~~~~~~~
-
-From Git Bash, run the following commands:
-
-``cd dataverse``
-
-The ``dataverse`` directory you changed is the one you just cloned. Vagrant will operate on a file called ``Vagrantfile``.
-
-``vagrant up``
-
-After a long while you hopefully will have a Dataverse installation available at http://localhost:8888
-
-Improving Windows Support
--------------------------
-
-Windows Subsystem for Linux
-~~~~~~~~~~~~~~~~~~~~~~~~~~~
+Running Dataverse in Docker on Windows
+--------------------------------------

-We have been unable to get Windows Subsystem for Linux (WSL) to work. We tried following the steps at https://docs.microsoft.com/en-us/windows/wsl/install-win10 but the "Get" button was greyed out when we went to download Ubuntu.
+See the `post `_ by Akio Sone for additional details, but please observe the following:

-Discussion and Feedback
-~~~~~~~~~~~~~~~~~~~~~~~
+- In git, the line-ending setting should be set to always LF (line feed, ``core.autocrlf=input``)
+- You must have jq installed: https://jqlang.github.io/jq/download/

-For more discussion of Windows support for Dataverse Software development see our community list thread `"Do you want to develop on Windows?" `_ We would be happy to incorporate feedback from Windows developers into this page. The :doc:`documentation` section describes how.
+Once the above is all set you can move on to :doc:`/container/dev-usage` in the Container Guide.

From acb1a52f2611b54d5732c100816c42c94785dbdf Mon Sep 17 00:00:00 2001
From: Philip Durbin
Date: Fri, 23 Jun 2023 09:11:07 -0400
Subject: [PATCH 0385/1525] how to run a single container #9540

---
 doc/sphinx-guides/source/developers/dev-environment.rst | 4 ++++
 1 file changed, 4 insertions(+)

diff --git a/doc/sphinx-guides/source/developers/dev-environment.rst b/doc/sphinx-guides/source/developers/dev-environment.rst
index 14087650eb4..1159121f491 100755
--- a/doc/sphinx-guides/source/developers/dev-environment.rst
+++ b/doc/sphinx-guides/source/developers/dev-environment.rst
@@ -184,6 +184,10 @@ If you want to remove the containers, then run:

 ``docker-compose -f docker-compose-dev.yml down``

+If you want to run a single container (the mail server, for example) then run:
+
+``docker-compose -f docker-compose-dev.yml up dev_smtp``
+
 For a fresh installation, and before running the Software Installer Script, it is recommended to delete the docker-dev-env folder to avoid installation problems due to existing data in the containers.
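Taken together, the container cleanup advice above amounts to a short shell sequence (a sketch only, assuming the ``docker-dev-env`` folder sits in the directory from which ``docker-compose`` is run)::

    # stop and remove the dev containers
    docker-compose -f docker-compose-dev.yml down
    # clear persisted state before a fresh installation (folder location assumed)
    rm -rf docker-dev-env
    # bring up a single service again, e.g. just the mail server
    docker-compose -f docker-compose-dev.yml up dev_smtp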
Run the Dataverse Software Installer Script From 6bed5a33df0a033f8ebf904bb3f14b016324efa4 Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Fri, 23 Jun 2023 09:25:58 -0400 Subject: [PATCH 0386/1525] encourage Docker, remove Vagrant #9540 #9616 --- .../source/developers/dev-environment.rst | 16 ++++++++++++---- 1 file changed, 12 insertions(+), 4 deletions(-) diff --git a/doc/sphinx-guides/source/developers/dev-environment.rst b/doc/sphinx-guides/source/developers/dev-environment.rst index 1159121f491..214d5da8192 100755 --- a/doc/sphinx-guides/source/developers/dev-environment.rst +++ b/doc/sphinx-guides/source/developers/dev-environment.rst @@ -7,12 +7,20 @@ These instructions are purposefully opinionated and terse to help you get your d .. contents:: |toctitle| :local: -Quick Start ------------ +Quick Start (Docker) +-------------------- -The quickest way to get the Dataverse Software running is to use Vagrant as described in the :doc:`tools` section, or use Docker containers as described the :doc:`../container/dev-usage` section of the Container Guide. +The quickest way to get Dataverse running is in Docker as explained in :doc:`../container/dev-usage` section of the Container Guide. -For day to day development work, we recommended the following setup. + +Classic Dev Environment +----------------------- + +Since before Docker existed, we have encouraged installing Dataverse and all its dependencies directly on your development machine, as described below. This can be thought of as the "classic" development environment for Dataverse. + +However, in 2023 we decided that we'd like to encourage all developers to start using Docker instead and opened https://github.com/IQSS/dataverse/issues/9616 to indicate that we plan to rewrite this page to recommend the use of Docker. + +There's nothing wrong with the classic instructions below and we don't plan to simply delete them. They are a valid alternative to running Dataverse in Docker. We will likely move them to another page. Set Up Dependencies ------------------- From eaa0ef5a4acf147fbc50b6dd5b136fd1ebc1200a Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Fri, 23 Jun 2023 09:31:33 -0400 Subject: [PATCH 0387/1525] improve quickstart #9540 --- .../source/container/dev-usage.rst | 20 ++++++++++++++----- 1 file changed, 15 insertions(+), 5 deletions(-) diff --git a/doc/sphinx-guides/source/container/dev-usage.rst b/doc/sphinx-guides/source/container/dev-usage.rst index 15c4a244f39..3fbe55766d5 100644 --- a/doc/sphinx-guides/source/container/dev-usage.rst +++ b/doc/sphinx-guides/source/container/dev-usage.rst @@ -6,6 +6,21 @@ Please note! This Docker setup is not for production! .. contents:: |toctitle| :local: +Quickstart +---------- + +First, install Java 11 and Maven. + +After cloning the repo, try this: + +``mvn -Pct clean package docker:run`` + +After some time you should be able to log in: + +- url: http://localhost:8080 +- username: dataverseAdmin +- password: admin1 + Intro ----- @@ -17,11 +32,6 @@ To test drive these local changes to the Dataverse codebase in a containerized a setup described in :doc:`../developers/dev-environment`), you must a) build the application and b) run it in addition to the necessary dependencies. (Which might involve building a new local version of the :doc:`configbaker-image`.) -| **TL;DR** *I have all that, just give me containers!* -| Execute ``mvn -Pct clean package docker:run``, wait for "done" message and log in at http://localhost:8080. -| (Username: ``dataverseAdmin``, Password: ``admin1``) - - .. 
_dev-build: Building From 1400e07486223465641e289756c7d38577dbf00b Mon Sep 17 00:00:00 2001 From: qqmyers Date: Fri, 23 Jun 2023 12:28:15 -0400 Subject: [PATCH 0388/1525] fix gb render conditions, initial fix for download buttons --- .../webapp/guestbook-terms-popup-fragment.xhtml | 14 +++++++++----- 1 file changed, 9 insertions(+), 5 deletions(-) diff --git a/src/main/webapp/guestbook-terms-popup-fragment.xhtml b/src/main/webapp/guestbook-terms-popup-fragment.xhtml index 829fa6539b9..ab75ffbe3e7 100644 --- a/src/main/webapp/guestbook-terms-popup-fragment.xhtml +++ b/src/main/webapp/guestbook-terms-popup-fragment.xhtml @@ -40,7 +40,7 @@ - + + + + #{bundle.preview} "#{empty(fileMetadata.directoryLabel) ? "":fileMetadata.directoryLabel.concat("/")}#{fileMetadata.label}" + + + From 5f95a8bb1b5710cd5eea0f8c826ca2c3d20e8fee Mon Sep 17 00:00:00 2001 From: Stephen Kraffmiller Date: Fri, 23 Jun 2023 14:17:37 -0400 Subject: [PATCH 0392/1525] #9663 fix test error message --- .../iq/dataverse/externaltools/ExternalToolServiceBeanTest.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/test/java/edu/harvard/iq/dataverse/externaltools/ExternalToolServiceBeanTest.java b/src/test/java/edu/harvard/iq/dataverse/externaltools/ExternalToolServiceBeanTest.java index 7cc28c82cd6..7b33cb8a19f 100644 --- a/src/test/java/edu/harvard/iq/dataverse/externaltools/ExternalToolServiceBeanTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/externaltools/ExternalToolServiceBeanTest.java @@ -338,7 +338,7 @@ public void testParseAddExternalToolInputWrongType() { } assertNotNull(expectedException); System.out.println("exception: " + expectedException); - assertEquals("Type must be one of these values: [explore, configure, preview].", expectedException.getMessage()); + assertEquals("Type must be one of these values: [explore, configure, preview, query].", expectedException.getMessage()); } @Test From 2b3a1cc8925c154b43cb1b91721df8c87287ee70 Mon Sep 17 00:00:00 2001 From: qqmyers Date: Fri, 23 Jun 2023 15:30:51 -0400 Subject: [PATCH 0393/1525] switch to guestbook-terms-popup-fragment --- src/main/webapp/file-download-popup-fragment.xhtml | 1 + src/main/webapp/file.xhtml | 2 +- 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/src/main/webapp/file-download-popup-fragment.xhtml b/src/main/webapp/file-download-popup-fragment.xhtml index 6fe3863b85f..65b5cc9a851 100644 --- a/src/main/webapp/file-download-popup-fragment.xhtml +++ b/src/main/webapp/file-download-popup-fragment.xhtml @@ -1,3 +1,4 @@ + - + From 51005bad3afd714aa8edbbbb03eb0faf604470c2 Mon Sep 17 00:00:00 2001 From: qqmyers Date: Fri, 23 Jun 2023 15:31:22 -0400 Subject: [PATCH 0394/1525] update fragment with license info and buttons for other views --- .../guestbook-terms-popup-fragment.xhtml | 195 +++++++++++++++--- 1 file changed, 168 insertions(+), 27 deletions(-) diff --git a/src/main/webapp/guestbook-terms-popup-fragment.xhtml b/src/main/webapp/guestbook-terms-popup-fragment.xhtml index ab75ffbe3e7..4bddbb0dd38 100644 --- a/src/main/webapp/guestbook-terms-popup-fragment.xhtml +++ b/src/main/webapp/guestbook-terms-popup-fragment.xhtml @@ -9,37 +9,138 @@ xmlns:iqbs="http://xmlns.jcp.org/jsf/composite/iqbs"> -

    - #{someActivelyEmbargoedFiles ? bundle['file.requestAccessTermsDialog.embargoed.tip'] : bundle['file.requestAccessTermsDialog.tip']} -

    -

    - #{bundle['file.requestAccessTermsDialog.embargoed']} -

    -
    -
    -
    + + + +
    + -
    -
    -
    - -
    -
    -
    +
    -
    -
    -
    +
    -
    +
diff --git a/src/main/webapp/filesFragment.xhtml b/src/main/webapp/filesFragment.xhtml
index 48b707edd92..16ba1042fd6 100644
--- a/src/main/webapp/filesFragment.xhtml
+++ b/src/main/webapp/filesFragment.xhtml
@@ -530,14 +530,15 @@
 fileMetadata.dataFile.filePackage and systemConfig.HTTPDownload -->
+ href="#{widgetWrapper.wrapURL('/file.xhtml?'.concat(!empty fileMetadata.dataFile.globalId ? 'persistentId=' : 'fileId=').concat(!empty fileMetadata.dataFile.globalId ? fileMetadata.dataFile.globalId.asString() : fileMetadata.dataFile.id).concat('&version=').concat(fileMetadata.datasetVersion.friendlyVersionNumber)).concat('&toolType=QUERY')}">
 #{bundle.preview} "#{empty(fileMetadata.directoryLabel) ? "":fileMetadata.directoryLabel.concat("/")}#{fileMetadata.label}"
+
+ href="#{widgetWrapper.wrapURL('/file.xhtml?'.concat(!empty fileMetadata.dataFile.globalId ? 'persistentId=' : 'fileId=').concat(!empty fileMetadata.dataFile.globalId ? fileMetadata.dataFile.globalId.asString() : fileMetadata.dataFile.id).concat('&version=').concat(fileMetadata.datasetVersion.friendlyVersionNumber)).concat('&toolType=PREVIEW')}">
 #{bundle.preview} "#{empty(fileMetadata.directoryLabel) ? "":fileMetadata.directoryLabel.concat("/")}#{fileMetadata.label}"

From b2ac9604dd029530a2a03343beb000dfe8842d4f Mon Sep 17 00:00:00 2001
From: Leonid Andreev
Date: Wed, 28 Jun 2023 17:00:04 -0400
Subject: [PATCH 0427/1525] more notes added. #9670

---
 doc/release-notes/5.14-release-notes.md      | 45 +++++++++++++++++++
 doc/release-notes/9148-license-via-api.md    | 28 ------------
 .../9150-improved-external-vocab-supprt.md   |  1 -
 doc/release-notes/9175-external-exporters.md | 11 -----
 4 files changed, 45 insertions(+), 40 deletions(-)
 delete mode 100644 doc/release-notes/9148-license-via-api.md
 delete mode 100644 doc/release-notes/9150-improved-external-vocab-supprt.md
 delete mode 100644 doc/release-notes/9175-external-exporters.md

diff --git a/doc/release-notes/5.14-release-notes.md b/doc/release-notes/5.14-release-notes.md
index 1937b524628..fac8ffc4d75 100644
--- a/doc/release-notes/5.14-release-notes.md
+++ b/doc/release-notes/5.14-release-notes.md
@@ -81,6 +81,50 @@ It is now possible to configure registering PIDs for files in individual collect

 For example, registration of PIDs for files can be enabled in a specific collection when it is disabled instance-wide. Or it can be disabled in specific collections where it is enabled by default. See the [:FilePIDsEnabled](https://guides.dataverse.org/en/latest/installation/config.html#filepidsenabled) section of the Configuration guide for details. (PR #9614)

+### Mechanism Added for Adding External Exporters
+
+It is now possible for third parties to develop and share code to provide new metadata export formats for Dataverse. Export formats can be made available via the Dataverse UI and API or configured for use in Harvesting. Dataverse now provides developers with a separate dataverse-spi JAR file that contains the Java interfaces and classes required to create a new metadata Exporter. Once a new Exporter has been created and packaged as a JAR file, administrators can use it by specifying a local directory for third party Exporters, dropping the Exporter JAR there, and restarting Payara. This mechanism also allows new Exporters to replace any of Dataverse's existing metadata export formats.
(PR #9175)
+
+#### Backward Incompatibilities
+
+**TODO: add below**
+
+Care should be taken when replacing Dataverse's internal metadata export formats, as third-party code, including other third-party Exporters, may depend on the contents of those export formats. When replacing an existing format, one must also remember to delete the cached metadata export files or run the reExport command for the metadata exports of existing datasets to be updated.
+
+#### New JVM/MicroProfile Settings
+
+dataverse.spi.export.directory - specifies a directory, readable by the Dataverse server. Any Exporter JAR files placed in this directory will be read by Dataverse and used to add/replace the specified metadata format.
+
+### Handling of license information fixed in the API
+
+(PR #9568)
+
+**TODO: add this under "backward incompatibility"**
+
+When publishing a dataset via API, it now requires the dataset to either have a standard license configured, or have valid Custom Terms of Use (if allowed by the instance). Attempting to publish a dataset without either **will fail with an error message**. This introduces a backward incompatibility, and if you have scripts that automatically create, update and publish datasets, this last step may start failing. Unfortunately, there were some problems with the datasets APIs that made it difficult to manage licenses, so an API user was likely to end up with a dataset missing both of the above. In this release we have addressed it by making the following fixes:
+
+We fixed the incompatibility between the format in which license information was *exported* in JSON, and the format the create and update APIs were expecting it for *import* (https://github.com/IQSS/dataverse/issues/9155). This means that the following JSON format can now be imported:
+```
+"license": {
+  "name": "CC0 1.0",
+  "uri": "http://creativecommons.org/publicdomain/zero/1.0"
+}
+```
+However, for the sake of backward compatibility the old format
+```
+"license" : "CC0 1.0"
+```
+will be accepted as well.
+
+We have added the default license (CC0) to the model JSON file that we provide and recommend to use as the model in the Native API Guide (https://github.com/IQSS/dataverse/issues/9364).
+
+And we have corrected the misleading language in the same guide where we used to recommend to users that they select, edit and re-import only the `.metadataBlocks` fragment of the JSON metadata representing the latest version. There are in fact other useful pieces of information that need to be preserved in the update (such as the `"license"` section above). So the recommended way of creating base JSON for updates via the API is to select *everything but* the `"files"` section, with (for example) the following `jq` command:
+
+```
+jq '.data | del(.files)'
+```
+
+Please see the [Update Metadata For a Dataset](https://guides.dataverse.org/en/latest/api/native-api.html#update-metadata-for-a-dataset) section of our Native API Guide for more information.

### Changes and fixes in this release not already mentioned above include:

- A feature flag called "api-session-auth" has been added temporarily to aid in the development of the new frontend (#9063) but will be removed once bearer tokens (#9229) have been implemented. There is a security risk (CSRF) in enabling this flag! Do not use it in production!
For more information, see http://preview.guides.gdcc.io/en/develop/installation/config.html#feature-flags
 - Changes made in v5.13 and v5.14 in multiple PRs to improve the embedded Schema.org metadata in dataset pages will only be propagated to the Schema.Org JSON-LD metadata export if a reExportAll() is done. (PR #9102)
 - TODO: The 5.14 release notes should include the standard instructions for doing a reExportAll after updating the code, on account of the item above. (L.A.)
+- It is now possible to write external vocabulary scripts that target a single child field in a metadata block. Example scripts are now available at https://github.com/gdcc/dataverse-external-vocab-support that can be configured to support lookup from the Research Organization Registry (ROR) for the Author Affiliation Field and for the CrossRef Funding Registry (Fundreg) in the Funding Information/Agency field, both in the standard Citation metadata block. Application of these scripts to other fields, and the development of other scripts targeting child fields, are now possible. (PR #9402)

 ## New JVM Options and MicroProfile Config Options

diff --git a/doc/release-notes/9148-license-via-api.md b/doc/release-notes/9148-license-via-api.md
deleted file mode 100644
index 4c27af941e3..00000000000
--- a/doc/release-notes/9148-license-via-api.md
+++ /dev/null
@@ -1,28 +0,0 @@
-# License management via API
-
-See https://github.com/IQSS/dataverse/issues/9148.
-
-When publishing a dataset via API, it now requires the dataset to either have a standard license configured, or have valid Custom Terms of Use (if allowed by the instance). Attempting to publish a dataset without such **will fail with an error message**. This introduces a backward incompatibility, and if you have scripts that automatically create, update and publish datasets, this last step may start failing. Because, unfortunately, there were some problems with the datasets APIs that made it difficult to manage licenses, so an API user was likely to end up with a dataset missing either of the above. In this release we have addressed it by making the following fixes:
-
-We fixed the incompatibility between the format in which license information was *exported* in json, and the format the create and update APIs were expecting it for *import* (https://github.com/IQSS/dataverse/issues/9155). This means that the following json format can now be imported:
-```
-"license": {
-  "name": "CC0 1.0",
-  "uri": "http://creativecommons.org/publicdomain/zero/1.0"
-}
-```
-However, for the sake of backward compatibility the old format
-```
-"license" : "CC0 1.0"
-```
-will be accepted as well.
-
-We have added the default license (CC0) to the model json file that we provide and recommend to use as the model in the Native API Guide (https://github.com/IQSS/dataverse/issues/9364).
-
-And we have corrected the misleading language in the same guide where we used to recommend to users that they select, edit and re-import only the `.metadataBlocks` fragment of the json metadata representing the latest version. There are in fact other useful pieces of information that need to be preserved in the update (such as the `"license"` section above).
So the recommended way of creating base json for updates via the API is to select *everything but* the `"files"` section, with (for example) the following `jq` command: - -``` -jq '.data | del(.files)' -``` - -Please see the [Update Metadata For a Dataset](https://guides.dataverse.org/en/latest/api/native-api.html#update-metadata-for-a-dataset) section of our Native Api guide for more information. diff --git a/doc/release-notes/9150-improved-external-vocab-supprt.md b/doc/release-notes/9150-improved-external-vocab-supprt.md deleted file mode 100644 index 5ae678e185b..00000000000 --- a/doc/release-notes/9150-improved-external-vocab-supprt.md +++ /dev/null @@ -1 +0,0 @@ -It is now possible to write external vocabulary scripts that target a single child field in a metadata block. Example scripts are now available at https://github.com/gdcc/dataverse-external-vocab-support that can be configured to support lookup from the Research Orgnaization Registry (ROR) for the Author Affiliation Field and for the CrossRef Funding Registry (Fundreg) in the Funding Information/Agency field, both in the standard Citation metadata block. Application if these scripts to other fields, and the development of other scripts targetting child fields are now possible. \ No newline at end of file diff --git a/doc/release-notes/9175-external-exporters.md b/doc/release-notes/9175-external-exporters.md deleted file mode 100644 index 79b6358dac5..00000000000 --- a/doc/release-notes/9175-external-exporters.md +++ /dev/null @@ -1,11 +0,0 @@ -## Ability to Create New Exporters - -It is now possible for third parties to develop and share code to provide new metadata export formats for Dataverse. Export formats can be made available via the Dataverse UI and API or configured for use in Harvesting. Dataverse now provides developers with a separate dataverse-spi JAR file that contains the Java interfaces and classes required to create a new metadata Exporter. Once a new Exporter has been created and packaged as a JAR file, administrators can use it by specifying a local directory for third party Exporters, dropping then Exporter JAR there, and restarting Payara. This mechanism also allows new Exporters to replace any of Dataverse's existing metadata export formats. - -## Backward Incompatibilities - -Care should be taken when replacing Dataverse's internal metadata export formats as third party code, including other third party Exporters may depend on the contents of those export formats. When replacing an existing format, one must also remember to delete the cached metadata export files or run the reExport command for the metadata exports of existing datasets to be updated. - -## New JVM/MicroProfile Settings - -dataverse.spi.export.directory - specifies a directory, readable by the Dataverse server. Any Exporter JAR files placed in this directory will be read by Dataverse and used to add/replace the specified metadata format. 
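To illustrate, a minimal sketch of wiring up a third-party Exporter under this setting; the directory path and JAR name below are placeholders, and the `asadmin` invocation assumes a standard Payara setup:

```
# Placeholder directory and JAR name -- adjust to your installation.
./asadmin create-jvm-options '-Ddataverse.spi.export.directory=/var/lib/dataverse/exporters'
cp my-exporter-1.0.jar /var/lib/dataverse/exporters/
# The directory is read by Dataverse at startup, so restart Payara to pick up the new Exporter:
service payara stop && service payara start
```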
\ No newline at end of file

From b6659d4b827c2faed45010ea5f8b2fc2b877e94a Mon Sep 17 00:00:00 2001
From: Oliver Bertuch
Date: Wed, 28 Jun 2023 23:17:36 +0200
Subject: [PATCH 0428/1525] fix(ct): optimize base image for Payara 6 #8305

---
 modules/container-base/src/main/docker/Dockerfile            | 2 +-
 .../src/main/docker/scripts/removeExpiredCaCerts.sh          | 8 ++++++++
 2 files changed, 9 insertions(+), 1 deletion(-)

diff --git a/modules/container-base/src/main/docker/Dockerfile b/modules/container-base/src/main/docker/Dockerfile
index bbd02a14328..148c29e0ed1 100644
--- a/modules/container-base/src/main/docker/Dockerfile
+++ b/modules/container-base/src/main/docker/Dockerfile
@@ -206,7 +206,7 @@ RUN <<EOF
-    sed -i 's#<servlet-class>org.apache.jasper.servlet.JspServlet</servlet-class>#<servlet-class>org.apache.jasper.servlet.JspServlet</servlet-class>\n    <init-param>\n      <param-name>development</param-name>\n      <param-value>false</param-value>\n    </init-param>\n    <init-param>\n      <param-name>genStrAsCharArray</param-name>\n      <param-value>true</param-value>\n    </init-param>#' "${DOMAIN_DIR}/config/default-web.xml"
+    sed -i 's#<servlet-class>org.glassfish.wasp.servlet.JspServlet</servlet-class>#<servlet-class>org.glassfish.wasp.servlet.JspServlet</servlet-class>\n    <init-param>\n      <param-name>development</param-name>\n      <param-value>false</param-value>\n    </init-param>\n    <init-param>\n      <param-name>genStrAsCharArray</param-name>\n      <param-value>true</param-value>\n    </init-param>#' "${DOMAIN_DIR}/config/default-web.xml"
     # Cleanup old CA certificates to avoid unnecessary log clutter during startup
     ${SCRIPT_DIR}/removeExpiredCaCerts.sh
     # Delete generated files

diff --git a/modules/container-base/src/main/docker/scripts/removeExpiredCaCerts.sh b/modules/container-base/src/main/docker/scripts/removeExpiredCaCerts.sh
index 205a9eda5d7..c019c09130e 100644
--- a/modules/container-base/src/main/docker/scripts/removeExpiredCaCerts.sh
+++ b/modules/container-base/src/main/docker/scripts/removeExpiredCaCerts.sh
@@ -8,6 +8,14 @@ set -euo pipefail

 KEYSTORE="${DOMAIN_DIR}/config/cacerts.jks"

+if [ ! -r "${KEYSTORE}" ]; then
+    KEYSTORE="${DOMAIN_DIR}/config/cacerts.p12"
+    if [ ! -r "${KEYSTORE}" ]; then
+        echo "Could not find CA certs keystore"
+        exit 1
+    fi
+fi
+
 keytool -list -v -keystore "${KEYSTORE}" -storepass changeit 2>/dev/null | \
     grep -i 'alias\|until' > aliases.txt

From 7ce2cc94df4d5f91102e97d206cd189400e1c507 Mon Sep 17 00:00:00 2001
From: Oliver Bertuch
Date: Wed, 28 Jun 2023 23:51:05 +0200
Subject: [PATCH 0429/1525] fix(ui): make citation download widget JSF 4 compliant

The backing bean actions were attached to normal "" elements, which means they were executed on page load and not, as expected, via JavaScript onClick. Former versions of JSF were less strict, now we need to use the standard compliant ""

---
 src/main/webapp/dataset-citation.xhtml | 12 +++---------
 1 file changed, 3 insertions(+), 9 deletions(-)

diff --git a/src/main/webapp/dataset-citation.xhtml b/src/main/webapp/dataset-citation.xhtml
index 9baced25be0..b42dd5e563f 100644
--- a/src/main/webapp/dataset-citation.xhtml
+++ b/src/main/webapp/dataset-citation.xhtml
@@ -33,19 +33,13 @@
    From a57a8ea6a39520c06ea30934113b2c7118dadd18 Mon Sep 17 00:00:00 2001 From: Leonid Andreev Date: Wed, 28 Jun 2023 18:36:31 -0400 Subject: [PATCH 0430/1525] more notes added (#9670) --- doc/release-notes/5.14-release-notes.md | 73 +++++++++++++++---- .../9185-contact-email-updates.md | 12 --- .../9204-group-by-file-ordering.md | 9 --- doc/release-notes/9229-bearer-api-auth.md | 1 - doc/release-notes/9256-series.md | 42 ----------- .../9277-nonstopmode-pdf-guides.md | 3 - .../9331-extract-bounding-box.md | 1 - doc/release-notes/9374-binder-orig.md | 7 -- .../9414-containerized_dev_dependencies.md | 2 - 9 files changed, 58 insertions(+), 92 deletions(-) delete mode 100644 doc/release-notes/9185-contact-email-updates.md delete mode 100644 doc/release-notes/9204-group-by-file-ordering.md delete mode 100644 doc/release-notes/9229-bearer-api-auth.md delete mode 100644 doc/release-notes/9256-series.md delete mode 100644 doc/release-notes/9277-nonstopmode-pdf-guides.md delete mode 100644 doc/release-notes/9331-extract-bounding-box.md delete mode 100644 doc/release-notes/9374-binder-orig.md delete mode 100644 doc/release-notes/9414-containerized_dev_dependencies.md diff --git a/doc/release-notes/5.14-release-notes.md b/doc/release-notes/5.14-release-notes.md index fac8ffc4d75..7d6b612adb2 100644 --- a/doc/release-notes/5.14-release-notes.md +++ b/doc/release-notes/5.14-release-notes.md @@ -2,6 +2,8 @@ This release brings new features, enhancements, and bug fixes to the Dataverse software. Thank you to all of the community members who contributed code, suggestions, bug reports, and other assistance across the project. +You can jump directly to the Installation and/or Upgrade sections. + ## Release Highlights ### For installations using MDC (Make Data Count), it is now possible to display both the MDC metrics and the legacy access counts, generated before MDC was enabled. @@ -10,6 +12,7 @@ This is enabled via the new setting `:MDCStartDate` that specifies the cutoff da ### An API endpoint for deleting a file is finally available +**TODO: move under "other fixes"** Support for deleting files using native API: http://preview.guides.gdcc.io/en/develop/api/native-api.html#deleting-files (PR #9383) ### Changes to PID Provider JVM Settings @@ -95,6 +98,50 @@ Care should be taken when replacing Dataverse's internal metadata export formats dataverse.spi.export.directory - specifies a directory, readable by the Dataverse server. Any Exporter JAR files placed in this directory will be read by Dataverse and used to add/replace the specified metadata format. +### Contact Email Improvements + +Email sent from the contact forms to the contact(s) for a collection, dataset, or datafile can now optionally be cc'd to a support email address. The support email address can be changed from the default :SystemEmail address to a separate :SupportEmail address. When multiple contacts are listed, the system will now send one email to all contacts (with the optional cc if configured) instead of separate emails to each contact. Contact names with a comma that refer to Organizations will no longer have the name parts reversed in the email greeting. A new protected feedback API has been added. (PR #9204) + +#### Backward Incompatibilities + +**TODO: add to the backward incompatibilities section** +When there are multiple contacts, the system will now send one email with all of the contacts in the To: header instead of sending one email to each contact (with no indication that others have been notified). 
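As an illustration of the new protected feedback API mentioned above, a hedged sketch of a call; the endpoint path and payload field names here are assumptions and should be checked against the API guide:

```
# Assumed endpoint and payload shape -- verify against the API guide.
export SERVER_URL=http://localhost:8080
curl -X POST -H 'Content-Type: application/json' "$SERVER_URL/api/admin/feedback" -d '{
  "targetId": 24,
  "subject": "Dataverse Feedback",
  "body": "Is this dataset a good fit for my research?",
  "fromEmail": "user@example.org"
}'
```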
+
+#### New JVM/MicroProfile Settings
+
+dataverse.mail.support-email - allows a separate email, distinct from the :SystemEmail, to be used as the To: address in emails from the contact form / feedback API.
+dataverse.mail.cc-support-on-contact-emails - include the support email address as a CC: entry when contact/feedback emails are sent to the contacts for a collection, dataset, or datafile.
+
+### Support for Grouping Dataset Files by Folder and Category Tag
+
+Dataverse now supports grouping dataset files by folder and/or optionally by Tag/Category. The default for whether to order by folder can be changed via :OrderByFolder. Ordering by category must be enabled by an administrator via the :CategoryOrder parameter, which is used to specify which tags appear first (e.g. to put Documentation files before Data or Code files, etc.). These Group-By options work with the existing sort options, i.e. sorting alphabetically means that files within each folder or tag group will be sorted alphabetically. :AllowUsersToManageOrdering can be set to true to allow users to turn folder ordering and category ordering (if enabled) on or off in the current dataset view.
+
+#### New Setting
+
+:CategoryOrder - a comma-separated list of Category/Tag names defining the order in which files with those tags should be displayed. The setting can include custom tag names along with the pre-defined defaults (Documentation, Data, and Code, which can be overridden by the :FileCategories setting.)
+:OrderByFolder - defaults to true - whether to group files in the same folder together
+:AllowUserManagementOfOrder - default false - allow users to toggle ordering on/off in the dataset display
+
+### Metadata field Series now repeatable
+
+This enhancement allows depositors to define multiple instances of the metadata field Series in the Citation Metadata block.
+
+Data contained in a dataset may belong to multiple series. Making the field repeatable makes it possible to reflect this fact in the dataset metadata. (PR #9256)
+
+### Guides in PDF Format
+
+An experimental version of the guides in PDF format is available at (PR #9474)
+
+Advice for contributors to documentation who want to help improve the PDF is available at http://preview.guides.gdcc.io/en/develop/developers/documentation.html#pdf-version-of-the-guides
+
+### Files downloaded from Binder are now in their original format.
+
+For example, data.dta (a Stata file) will be downloaded instead of data.tab (the archival version Dataverse creates as part of a successful ingest). (PR #9483)
+
+This should make it easier to write code to reproduce results, as the dataset authors and subsequent researchers are likely operating on the original file format rather than the format that Dataverse creates.
+
+For details, see #9374, , and .
+
 ### Handling of license information fixed in the API

 (PR #9568)

@@ -134,6 +181,10 @@ Please see the [Update Metadata For a Dataset](https://guides.dataverse.org/en/l

 ### Changes and fixes in this release not already mentioned above include:

 - A date column has been added to the restricted file access request overview, indicating when the earliest request by that user was made. An issue was fixed where the request list was not updated when a request was approved or rejected. (PR #9257)
 - A feature flag called "api-session-auth" has been added temporarily to aid in the development of the new frontend (#9063) but will be removed once bearer tokens (#9229) have been implemented. There is a security risk (CSRF) in enabling this flag! Do not use it in production! For more information, see http://preview.guides.gdcc.io/en/develop/installation/config.html#feature-flags
 - Changes made in v5.13 and v5.14 in multiple PRs to improve the embedded Schema.org metadata in dataset pages will only be propagated to the Schema.Org JSON-LD metadata export if a reExportAll() is done. (PR #9102)
 - TODO: The 5.14 release notes should include the standard instructions for doing a reExportAll after updating the code, on account of the item above. (L.A.)
+- It is now possible to write external vocabulary scripts that target a single child field in a metadata block.
Example scripts are now available at https://github.com/gdcc/dataverse-external-vocab-support that can be configured to support lookup from the Research Organization Registry (ROR) for the Author Affiliation Field and for the CrossRef Funding Registry (Fundreg) in the Funding Information/Agency field, both in the standard Citation metadata block. Application of these scripts to other fields, and the development of other scripts targeting child fields, are now possible. (PR #9402)
+- A feature flag called "api-bearer-auth" has been added. This allows OIDC user accounts to send authenticated API requests using Bearer Tokens. Note: This feature is limited to OIDC! For more information, see http://preview.guides.gdcc.io/en/develop/installation/config.html#feature-flags (PR #9591)
+- An attempt will be made to extract a geospatial bounding box (west, south, east, north) from NetCDF and HDF5 files and then insert these values into the geospatial metadata block, if enabled. (#9541)
+- New alternative to set up the Dataverse dependencies for the development environment through Docker Compose. (PR #9417)
+- New alternative, explained in the documentation, to build the Sphinx guides through a Docker container. (PR #9417)

 ## New JVM Options and MicroProfile Config Options

@@ -221,7 +272,7 @@ In the following commands we assume that Payara 5 is installed in `/usr/local/pa
 - `service payara stop`
 - `service payara start`

-6\. Reload citation metadata block (**TODO: may not be necessary, and/or another block may need to be updated**)
+6\. Update the Citation metadata block: (the update makes the field Series repeatable)

 - `wget https://github.com/IQSS/dataverse/releases/download/v5.14/citation.tsv`
 - `curl http://localhost:8080/api/admin/datasetfield/load -X POST --data-binary @citation.tsv -H "Content-type: text/tab-separated-values"`

@@ -231,7 +282,7 @@ If you are running an English-only installation, you are finished with the citat

 - `wget https://github.com/IQSS/dataverse/releases/download/v5.14/citation.properties`
 - `cp citation.properties /home/dataverse/langBundles`

-7\. Replace Solr schema.xml (**TODO: may not be necessary for 5.14) See specific instructions below for those installations without custom metadata blocks (1a) and those with custom metadata blocks (1b).
+7\. Replace Solr schema.xml to allow multiple series to be used. See specific instructions below for those installations without custom metadata blocks (1a) and those with custom metadata blocks (1b).

 Note: with this release support for indexing of the experimental workflow metadata block has been removed from the standard schema.xml.
 If you are using the workflow metadata block be sure to follow the instructions in step 7b) below to maintain support for indexing workflow metadata.
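Regarding the TODO in step 7b below: the script-based alternative for updating the schema is usually along these lines (a sketch only; the helper script name and Solr path are assumptions based on the Admin Guide, and may or may not be appropriate for this release):

```
# Sketch: regenerate the field definitions in schema.xml from the running
# instance instead of editing the file by hand. Assumes the update-fields.sh
# helper from the Dataverse release bundle is available.
curl "http://localhost:8080/api/admin/index/solr/schema" \
  | bash update-fields.sh /usr/local/solr/solr-8.11.1/server/solr/collection1/conf/schema.xml
```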
@@ -250,21 +301,13 @@ If you are using the workflow metadata block be sure to follow the instructions - Stop solr instance (usually service solr stop, depending on solr installation/OS, see the [Installation Guide](https://guides.dataverse.org/en/5.14/installation/prerequisites.html#solr-init-script) -- Edit the following line to your schema.xml (to indicate that productionPlace is now multiValued='true"): - - `` +- Edit the following lines in your schema.xml (to indicate that series and its components are now `multiValued="true"`): -- Add the following lines to your schema.xml to add support for geospatial indexing: + `` + `` + `` - `` - `` - `` - `` - `` - `` - `` - **TODO: why are we recommending editing the schema file by hand, instead of re-running the update script?** + **TODO: why are we recommending editing the schema file by hand, instead of re-running the update script?** - Restart Solr instance (usually service solr start, depending on solr/OS) diff --git a/doc/release-notes/9185-contact-email-updates.md b/doc/release-notes/9185-contact-email-updates.md deleted file mode 100644 index 3e03461a383..00000000000 --- a/doc/release-notes/9185-contact-email-updates.md +++ /dev/null @@ -1,12 +0,0 @@ -## Contact Email Improvements - -Email sent from the contact forms to the contact(s) for a collection, dataset, or datafile can now optionally be cc'd to a support email address. The support email address can be changed from the default :SystemEmail address to a separate :SupportEmail address. When multiple contacts are listed, the system will now send one email to all contacts (with the optional cc if configured) instead of separate emails to each contact. Contact names with a comma that refer to Organizations will no longer have the name parts reversed in the email greeting. A new protected feedback API has been added. - -## Backward Incompatibilities - -When there are multiple contacts, the system will now send one email with all of the contacts in the To: header instead of sending one email to each contact (with no indication that others have been notified). - -## New JVM/MicroProfile Settings - -dataverse.mail.support-email - allows a separate email, distinct from the :SystemEmail to be used as the to address in emails from the contact form/ feedback api. -dataverse.mail.cc-support-on-contact-emails - include the support email address as a CC: entry when contact/feedback emails are sent to the contacts for a collection, dataset, or datafile. \ No newline at end of file diff --git a/doc/release-notes/9204-group-by-file-ordering.md b/doc/release-notes/9204-group-by-file-ordering.md deleted file mode 100644 index 97f6b0750fb..00000000000 --- a/doc/release-notes/9204-group-by-file-ordering.md +++ /dev/null @@ -1,9 +0,0 @@ -### Support for Grouping Dataset Files by Folder and Category Tag - -Dataverse now supports grouping dataset files by folder and/or optionally by Tag/Category. The default for whether to order by folder can be changed via :OrderByFolder. Ordering by category must be enabled by an administrator via the :CategoryOrder parameter which is used to specify which tags appear first (e.g. to put Documentation files before Data or Code files, etc.) These Group-By options work with the existing sort options, i.e. sorting alphabetically means that files within each folder or tag group will be sorted alphabetically. :AllowUsersToManageOrdering can be set to true to allow users to turn folder ordering and category ordering (if enabled) on or off in the current dataset view. 
- -### New Setting - -:CategoryOrder - a comma separated list of Category/Tag names defining the order in which files with those tags should be displayed. The setting can include custom tag names along with the pre-defined defaults ( Documentation, Data, and Code, which can be overridden by the ::FileCategories setting.) -:OrderByFolder - defaults to true - whether to group files in the same folder together -:AllowUserManagementOfOrder - default false - allow users to toggle ordering on/off in the dataset display \ No newline at end of file diff --git a/doc/release-notes/9229-bearer-api-auth.md b/doc/release-notes/9229-bearer-api-auth.md deleted file mode 100644 index 543b803469b..00000000000 --- a/doc/release-notes/9229-bearer-api-auth.md +++ /dev/null @@ -1 +0,0 @@ -A feature flag called "api-bearer-auth" has been added. This allows OIDC useraccounts to send authenticated API requests using Bearer Tokens. Note: This feature is limited to OIDC! For more information, see http://preview.guides.gdcc.io/en/develop/installation/config.html#feature-flags diff --git a/doc/release-notes/9256-series.md b/doc/release-notes/9256-series.md deleted file mode 100644 index 97419af4616..00000000000 --- a/doc/release-notes/9256-series.md +++ /dev/null @@ -1,42 +0,0 @@ -## Metadata field Series now repeatable -This enhancement allows depositors to define multiple instances of the metadata field Series in the Citation Metadata block. - -## Major Use Cases and Infrastructure Enhancements -* Data contained in a dataset may belong to multiple series. Making the field Series repeatable will make it possible to reflect this fact in the dataset metadata. (Issue #9255, PR #9256) - -### Additional Upgrade Steps - -Update the Citation metadata block: - -wget https://github.com/IQSS/dataverse/releases/download/v5.14/citation.tsv -curl http://localhost:8080/api/admin/datasetfield/load -X POST --data-binary @citation.tsv -H "Content-type: text/tab-separated-values" - -## Additional Release Steps - -1. Replace Solr schema.xml to allow multiple series to be used. See specific instructions below for those installations without custom metadata blocks (1a) and those with custom metadata blocks (1b). - -1a. - -For installations without Custom Metadata Blocks: - --stop solr instance (usually service solr stop, depending on solr installation/OS, see the Installation Guide - --replace schema.xml - -cp /tmp/dvinstall/schema.xml /usr/local/solr/solr-8.11.1/server/solr/collection1/conf - --start solr instance (usually service solr start, depending on solr/OS) - -1b. 
- -For installations with Custom Metadata Blocks: - --stop solr instance (usually service solr stop, depending on solr installation/OS, see the Installation Guide - -edit the following lines to your schema.xml (to indicate that series and its components are now multiValued='true"): - - - - - --restart solr instance (usually service solr start, depending on solr/OS) \ No newline at end of file diff --git a/doc/release-notes/9277-nonstopmode-pdf-guides.md b/doc/release-notes/9277-nonstopmode-pdf-guides.md deleted file mode 100644 index eab456416ba..00000000000 --- a/doc/release-notes/9277-nonstopmode-pdf-guides.md +++ /dev/null @@ -1,3 +0,0 @@ -An experimental version of the guides in PDF format is available at - -Advice for contributors to documentation who want to help improve the PDF is available at http://preview.guides.gdcc.io/en/develop/developers/documentation.html#pdf-version-of-the-guides diff --git a/doc/release-notes/9331-extract-bounding-box.md b/doc/release-notes/9331-extract-bounding-box.md deleted file mode 100644 index c4ff83e40c0..00000000000 --- a/doc/release-notes/9331-extract-bounding-box.md +++ /dev/null @@ -1 +0,0 @@ -An attempt will be made to extract a geospatial bounding box (west, south, east, north) from NetCDF and HDF5 files and then insert these values into the geospatial metadata block, if enabled. diff --git a/doc/release-notes/9374-binder-orig.md b/doc/release-notes/9374-binder-orig.md deleted file mode 100644 index 5e6ff0e5c4d..00000000000 --- a/doc/release-notes/9374-binder-orig.md +++ /dev/null @@ -1,7 +0,0 @@ -Files downloaded from Binder are now in their original format. - -For example, data.dta (a Stata file) will be downloaded instead of data.tab (the archival version Dataverse creates as part of a successful ingest). - -This should make it easier to write code to reproduce results as the dataset authors and subsequent researchers are likely operating on the original file format rather that the format that Dataverse creates. - -For details, see #9374, , and . diff --git a/doc/release-notes/9414-containerized_dev_dependencies.md b/doc/release-notes/9414-containerized_dev_dependencies.md deleted file mode 100644 index 4e3680573f9..00000000000 --- a/doc/release-notes/9414-containerized_dev_dependencies.md +++ /dev/null @@ -1,2 +0,0 @@ -New alternative to setup the Dataverse dependencies for the development environment through Docker Compose. -New alternative, explained in the documentation, to build the Sphinx guides through a Docker container. 
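For the Docker Compose alternative mentioned in the notes directly above, usage is roughly the following (a sketch; the compose file name is an assumption based on the Dataverse source tree):

```
# Run from a checkout of the Dataverse source tree; file name assumed.
docker compose -f docker-compose-dev.yml up -d    # start the dev dependencies
docker compose -f docker-compose-dev.yml down     # tear them down again
```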
From a31444a7449233a32d2b3fb59d8cd0c8f8f37baf Mon Sep 17 00:00:00 2001 From: Stephen Kraffmiller Date: Thu, 29 Jun 2023 10:15:56 -0400 Subject: [PATCH 0431/1525] #9663 set tab title based on tool type --- .../java/edu/harvard/iq/dataverse/FilePage.java | 14 +++++++++++++- src/main/webapp/file.xhtml | 2 +- 2 files changed, 14 insertions(+), 2 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/FilePage.java b/src/main/java/edu/harvard/iq/dataverse/FilePage.java index dc928018dea..b056f43d3b7 100644 --- a/src/main/java/edu/harvard/iq/dataverse/FilePage.java +++ b/src/main/java/edu/harvard/iq/dataverse/FilePage.java @@ -1212,7 +1212,19 @@ public String getEmbargoPhrase() { return BundleUtil.getStringFromBundle("embargoed.willbeuntil"); } } - + + public String getToolTabTitle(){ + if( getSelectedTool() != null ){ + if(getSelectedTool().isPreviewTool()){ + return BundleUtil.getStringFromBundle("file.previewTab.header"); + } + if(getSelectedTool().isQueryTool()){ + return BundleUtil.getStringFromBundle("file.queryTab.header"); + } + } + return ""; + } + public String getIngestMessage() { return BundleUtil.getStringFromBundle("file.ingestFailed.message", Arrays.asList(settingsWrapper.getGuidesBaseUrl(), settingsWrapper.getGuidesVersion())); } diff --git a/src/main/webapp/file.xhtml b/src/main/webapp/file.xhtml index 5db72718cdd..6a91c248d86 100644 --- a/src/main/webapp/file.xhtml +++ b/src/main/webapp/file.xhtml @@ -359,7 +359,7 @@ - From 0ecd7c665d3bdafbb64ea784ba87c79f7d528132 Mon Sep 17 00:00:00 2001 From: Leonid Andreev Date: Thu, 29 Jun 2023 11:52:24 -0400 Subject: [PATCH 0432/1525] remaining notes added. (#9670) --- doc/release-notes/5.14-release-notes.md | 48 ++++++++++++++++++- .../9431-checksum-alg-in-direct-uploads.md | 4 -- doc/release-notes/9434-app-container.md | 12 ----- doc/release-notes/9480-h5web.md | 1 - doc/release-notes/9558-async-indexing.md | 3 -- doc/release-notes/9573-configbaker.md | 1 - .../9588-datasets-api-extension.md | 6 --- .../9656-api-optional-dataset-params.md | 5 -- 8 files changed, 47 insertions(+), 33 deletions(-) delete mode 100644 doc/release-notes/9431-checksum-alg-in-direct-uploads.md delete mode 100644 doc/release-notes/9434-app-container.md delete mode 100644 doc/release-notes/9480-h5web.md delete mode 100644 doc/release-notes/9558-async-indexing.md delete mode 100644 doc/release-notes/9573-configbaker.md delete mode 100644 doc/release-notes/9588-datasets-api-extension.md delete mode 100644 doc/release-notes/9656-api-optional-dataset-params.md diff --git a/doc/release-notes/5.14-release-notes.md b/doc/release-notes/5.14-release-notes.md index 7d6b612adb2..df87214c9fd 100644 --- a/doc/release-notes/5.14-release-notes.md +++ b/doc/release-notes/5.14-release-notes.md @@ -4,7 +4,28 @@ This release brings new features, enhancements, and bug fixes to the Dataverse s You can jump directly to the Installation and/or Upgrade sections. -## Release Highlights +## Release Highlights, New Features and Use Case Scenarios + +### For Dataverse developers, support for running Dataverse in Docker (experimental) + +Developers can experiment with running Dataverse in Docker: https://preview.guides.gdcc.io/en/develop/container/app-image.html (PR #9439) + +This is an image developers build locally. It is not meant for production use! 
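A hedged sketch of the local build referred to here, assuming the container ("ct") Maven profile described in the container guide and a running Docker daemon:

```
# Build the application and its container image locally -- sketch only,
# assuming the "ct" Maven profile; not suitable for production images.
mvn -Pct clean package
```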
+
+To provide a complete container-based local development environment, developers can deploy a Dataverse container from
+the new image in addition to other containers for the necessary dependencies:
+https://preview.guides.gdcc.io/en/develop/container/dev-usage.html
+
+Please note that with this emerging solution we will sunset older tooling like `docker-aio` and `docker-dcm`.
+We envision more testing possibilities in the future, to be discussed as part of the
+[Dataverse Containerization Working Group](https://dc.wgs.gdcc.io). There is no roadmap yet, but you have been warned.
+If there is some specific feature of these tools you would like to be kept, please [reach out](https://dc.wgs.gdcc.io).
+
+### Indexing performance improved
+
+Noticeable improvements in performance, especially for large datasets containing thousands of files.
+Uploading files one by one to the dataset is much faster now, allowing thousands of files to be uploaded in an acceptable timeframe. Not only uploading a file but all edit operations on datasets containing many files got faster.
+Performance tweaks include indexing of the datasets in the background and optimizations in the number of indexing operations needed. Furthermore, updates to the dataset no longer wait for ingesting to finish. Ingesting was already running in the background, but it took a lock, preventing updating the dataset and degrading performance for datasets containing many files. (PR #9558)

 ### For installations using MDC (Make Data Count), it is now possible to display both the MDC metrics and the legacy access counts, generated before MDC was enabled.

@@ -134,6 +155,26 @@ An experimental version of the guides in PDF format is available at
Date: Thu, 29 Jun 2023 16:40:05 -0400
Subject: [PATCH 0433/1525] the release note - it's coming along. #9670

---
 doc/release-notes/5.14-release-notes.md | 248 ++++++++++++------------
 1 file changed, 125 insertions(+), 123 deletions(-)

diff --git a/doc/release-notes/5.14-release-notes.md b/doc/release-notes/5.14-release-notes.md
index df87214c9fd..c4438b5ab45 100644
--- a/doc/release-notes/5.14-release-notes.md
+++ b/doc/release-notes/5.14-release-notes.md
@@ -2,9 +2,132 @@ This release brings new features, enhancements, and bug fixes to the Dataverse software. Thank you to all of the community members who contributed code, suggestions, bug reports, and other assistance across the project.

-You can jump directly to the Installation and/or Upgrade sections.
+Please note that, as an experiment, the sections of this release note are organized in a different order. The Upgrade and Installation sections are at the top, with the sections highlighting new features and fixes in detail further down.

-## Release Highlights, New Features and Use Case Scenarios
+## Installation
+
+If this is a new installation, please see our [Installation Guide](https://guides.dataverse.org/en/5.14/installation/). Please don't be shy about [asking for help](https://guides.dataverse.org/en/5.14/installation/intro.html#getting-help) if you need it!
+
+After your installation has gone into production, you are welcome to add it to our [map of installations](https://dataverse.org/installations) by opening an issue in the [dataverse-installations](https://github.com/IQSS/dataverse-installations) repo.
+
+## Upgrade Instructions
+
+0\. These instructions assume that you are upgrading from 5.13.
If you are running an earlier version, the only safe way to upgrade is to progress through the upgrades to all the releases in between before attempting the upgrade to 5.14. + +If you are running Payara as a non-root user (and you should be!), **remember not to execute the commands below as root**. Use `sudo` to change to that user first. For example, `sudo -i -u dataverse` if `dataverse` is your dedicated application user. + +In the following commands we assume that Payara 5 is installed in `/usr/local/payara5`. If not, adjust as needed. + +`export PAYARA=/usr/local/payara5` + +(or `setenv PAYARA /usr/local/payara5` if you are using a `csh`-like shell) + +1\. Undeploy the previous version. + +- `$PAYARA/bin/asadmin list-applications` +- `$PAYARA/bin/asadmin undeploy dataverse<-version>` + +2\. Stop Payara and remove the generated directory + +- `service payara stop` +- `rm -rf $PAYARA/glassfish/domains/domain1/generated` + +3\. Start Payara + +- `service payara start` + +4\. Deploy this version. + +- `$PAYARA/bin/asadmin deploy dataverse-5.14.war` + +5\. Restart Payara + +- `service payara stop` +- `service payara start` + +6\. Update the Citation metadata block: (the update makes the field Series repeatable) + +- `wget https://github.com/IQSS/dataverse/releases/download/v5.14/citation.tsv` +- `curl http://localhost:8080/api/admin/datasetfield/load -X POST --data-binary @citation.tsv -H "Content-type: text/tab-separated-values"` + +If you are running an English-only installation, you are finished with the citation block. Otherwise, download the updated citation.properties file and place it in the [`dataverse.lang.directory`](https://guides.dataverse.org/en/5.14/installation/config.html#configuring-the-lang-directory); `/home/dataverse/langBundles` used in the example below. + +- `wget https://github.com/IQSS/dataverse/releases/download/v5.14/citation.properties` +- `cp citation.properties /home/dataverse/langBundles` + +7\. Replace Solr schema.xml to allow multiple series to be used. See specific instructions below for those installations without custom metadata blocks (1a) and those with custom metadata blocks (1b). + +7a\. For installations without custom or experimental metadata blocks: + +- Stop Solr instance (usually service solr stop, depending on Solr installation/OS, see the [Installation Guide](https://guides.dataverse.org/en/5.14/installation/prerequisites.html#solr-init-script) + +- Replace schema.xml + + - `cp /tmp/dvinstall/schema.xml /usr/local/solr/solr-8.11.1/server/solr/collection1/conf` + +- Start solr instance (usually service solr start, depending on Solr/OS) + +7b\. For installations with custom or experimental metadata blocks: + +- Stop solr instance (usually service solr stop, depending on solr installation/OS, see the [Installation Guide](https://guides.dataverse.org/en/5.14/installation/prerequisites.html#solr-init-script) + +- Edit the following lines in your schema.xml (to indicate that series and its components are now `multiValued="true"`): + + `` + `` + `` + + **TODO: why are we recommending editing the schema file by hand, instead of re-running the update script?** + +- Restart Solr instance (usually service solr start, depending on solr/OS) + +8\. Run ReExportAll to update dataset metadata exports + +Follow the directions in the [Admin Guide](http://guides.dataverse.org/en/5.14/admin/metadataexport.html#batch-exports-through-the-api) + +## New JVM Options and MicroProfile Config Options + +The following PID provider settings are now available. 
See the section "Changes to PID Provider JVM Settings" below for more information.
+
+- `dataverse.pid.datacite.mds-api-url`
+- `dataverse.pid.datacite.rest-api-url`
+- `dataverse.pid.datacite.username`
+- `dataverse.pid.datacite.password`
+- `dataverse.pid.handlenet.key.path`
+- `dataverse.pid.handlenet.key.passphrase`
+- `dataverse.pid.handlenet.index`
+- `dataverse.pid.permalink.base-url`
+- `dataverse.pid.ezid.api-url`
+- `dataverse.pid.ezid.username`
+- `dataverse.pid.ezid.password`
+
+The following MicroProfile Config options have been added as part of [Signposting](https://signposting.org/) support. See the section "Signposting for Dataverse" for details.
+
+- `dataverse.signposting.level1-author-limit`
+- `dataverse.signposting.level1-item-limit`
+
+The following JVM options are described in the "Creating datasets with incomplete metadata through API" section below.
+
+- `dataverse.api.allow-incomplete-metadata`
+- `dataverse.ui.show-validity-filter`
+- `dataverse.ui.allow-review-for-incomplete`
+
+The following JVM/MicroProfile setting is for External Exporters, see "Mechanism Added for Adding External Exporters".
+
+- `dataverse.spi.export.directory`
+
+The following JVM/MicroProfile settings are for the handling of support emails (see "Contact Email Improvements").
+
+- `dataverse.mail.support-email`
+- `dataverse.mail.cc-support-on-contact-emails`
+
+
+
+## Backward Incompatibilities
+
+**TODO: (L.A.)**
+
+## Detailed Release Highlights, New Features and Use Case Scenarios

@@ -220,7 +343,6 @@ Please see the [Update Metadata For a Dataset](https://guides.dataverse.org/en/l

 - A date column has been added to the restricted file access request overview, indicating when the earliest request by that user was made. An issue was fixed where the request list was not updated when a request was approved or rejected. (PR #9257)
 - A feature flag called "api-session-auth" has been added temporarily to aid in the development of the new frontend (#9063) but will be removed once bearer tokens (#9229) have been implemented. There is a security risk (CSRF) in enabling this flag! Do not use it in production! For more information, see http://preview.guides.gdcc.io/en/develop/installation/config.html#feature-flags
 - Changes made in v5.13 and v5.14 in multiple PRs to improve the embedded Schema.org metadata in dataset pages will only be propagated to the Schema.Org JSON-LD metadata export if a reExportAll() is done. (PR #9102)
-- TODO: The 5.14 release notes should include the standard instructions for doing a reExportAll after updating the code, on account of the item above. (L.A.)
 - It is now possible to write external vocabulary scripts that target a single child field in a metadata block. Example scripts are now available at https://github.com/gdcc/dataverse-external-vocab-support that can be configured to support lookup from the Research Organization Registry (ROR) for the Author Affiliation Field and for the CrossRef Funding Registry (Fundreg) in the Funding Information/Agency field, both in the standard Citation metadata block. Application of these scripts to other fields, and the development of other scripts targeting child fields, are now possible. (PR #9402)
 - A feature flag called "api-bearer-auth" has been added. This allows OIDC user accounts to send authenticated API requests using Bearer Tokens. Note: This feature is limited to OIDC!
For more information, see http://preview.guides.gdcc.io/en/develop/installation/config.html#feature-flags (PR #9591) - An attempt will be made to extract a geospatial bounding box (west, south, east, north) from NetCDF and HDF5 files and then insert these values into the geospatial metadata block, if enabled. (#9541) @@ -232,130 +354,10 @@ Please see the [Update Metadata For a Dataset](https://guides.dataverse.org/en/l - Direct upload via the Dataverse UI will now support any algorithm configured via the `:FileFixityChecksumAlgorithm` setting. External apps using the direct upload API can now query Dataverse to discover which algorithm should be used. Sites that have been using an algorithm other than MD5 and direct upload and/or dvwebloader may want to use the `/api/admin/updateHashValues` call (see https://guides.dataverse.org/en/latest/installation/config.html?highlight=updatehashvalues#filefixitychecksumalgorithm) to replace any MD5 hashes on existing files. (PR #9482) -## New JVM Options and MicroProfile Config Options - -**TODO: the section below is from 5.13; still needs to be updated for 5.14 (L.A.)** - -The following JVM option is now available: - -- `dataverse.personOrOrg.assumeCommaInPersonName` - the default is false - -The following MicroProfile Config options are now available (these can be treated as JVM options): - -- `dataverse.files.uploads` - alternative storage location of generated temporary files for UI file uploads -- `dataverse.api.signing-secret` - used by signed URLs -- `dataverse.solr.host` -- `dataverse.solr.port` -- `dataverse.solr.protocol` -- `dataverse.solr.core` -- `dataverse.solr.path` -- `dataverse.rserve.host` - -The following existing JVM options are now available via MicroProfile Config: - -- `dataverse.siteUrl` -- `dataverse.fqdn` -- `dataverse.files.directory` -- `dataverse.rserve.host` -- `dataverse.rserve.port` -- `dataverse.rserve.user` -- `dataverse.rserve.password` -- `dataverse.rserve.tempdir` - -## Notes for Developers and Integrators - -See the "Backward Incompatibilities" section below. - -## Backward Incompatibilities - -**TODO: (L.A.)** ## Complete List of Changes For the complete list of code changes in this release, see the [5.14 milestone](https://github.com/IQSS/dataverse/milestone/108?closed=1) on GitHub. -## Installation - -If this is a new installation, please see our [Installation Guide](https://guides.dataverse.org/en/5.14/installation/). Please don't be shy about [asking for help](https://guides.dataverse.org/en/5.14/installation/intro.html#getting-help) if you need it! - -After your installation has gone into production, you are welcome to add it to our [map of installations](https://dataverse.org/installations) by opening an issue in the [dataverse-installations](https://github.com/IQSS/dataverse-installations) repo. - - - -## Upgrade Instructions - -0\. These instructions assume that you've already successfully upgraded from version 4.x to 5.0 of the Dataverse software following the instructions in the [release notes for version 5.0](https://github.com/IQSS/dataverse/releases/tag/v5.0). After upgrading from the 4.x series to 5.0, you should progress through the other 5.x releases before attempting the upgrade to 5.14. - -If you are running Payara as a non-root user (and you should be!), **remember not to execute the commands below as root**. Use `sudo` to change to that user first. For example, `sudo -i -u dataverse` if `dataverse` is your dedicated application user. 
- -In the following commands we assume that Payara 5 is installed in `/usr/local/payara5`. If not, adjust as needed. - -`export PAYARA=/usr/local/payara5` - -(or `setenv PAYARA /usr/local/payara5` if you are using a `csh`-like shell) - -1\. Undeploy the previous version. - -- `$PAYARA/bin/asadmin list-applications` -- `$PAYARA/bin/asadmin undeploy dataverse<-version>` - -2\. Stop Payara and remove the generated directory - -- `service payara stop` -- `rm -rf $PAYARA/glassfish/domains/domain1/generated` - -3\. Start Payara - -- `service payara start` - -4\. Deploy this version. - -- `$PAYARA/bin/asadmin deploy dataverse-5.14.war` - -5\. Restart Payara - -- `service payara stop` -- `service payara start` - -6\. Update the Citation metadata block: (the update makes the field Series repeatable) - -- `wget https://github.com/IQSS/dataverse/releases/download/v5.14/citation.tsv` -- `curl http://localhost:8080/api/admin/datasetfield/load -X POST --data-binary @citation.tsv -H "Content-type: text/tab-separated-values"` - -If you are running an English-only installation, you are finished with the citation block. Otherwise, download the updated citation.properties file and place in the [`dataverse.lang.directory`](https://guides.dataverse.org/en/5.14/installation/config.html#configuring-the-lang-directory). - -- `wget https://github.com/IQSS/dataverse/releases/download/v5.14/citation.properties` -- `cp citation.properties /home/dataverse/langBundles` - -7\. Replace Solr schema.xml to allow multiple series to be used. See specific instructions below for those installations without custom metadata blocks (1a) and those with custom metadata blocks (1b). - -Note: with this release support for indexing of the experimental workflow metadata block has been removed from the standard schema.xml. -If you are using the workflow metadata block be sure to follow the instructions in step 7b) below to maintain support for indexing workflow metadata. - -7a\. For installations without custom or experimental metadata blocks: - -- Stop Solr instance (usually service solr stop, depending on Solr installation/OS, see the [Installation Guide](https://guides.dataverse.org/en/5.14/installation/prerequisites.html#solr-init-script) - -- Replace schema.xml - - - `cp /tmp/dvinstall/schema.xml /usr/local/solr/solr-8.11.1/server/solr/collection1/conf` - -- Start solr instance (usually service solr start, depending on Solr/OS) - -7b\. 
For installations with custom or experimental metadata blocks: - -- Stop solr instance (usually service solr stop, depending on solr installation/OS, see the [Installation Guide](https://guides.dataverse.org/en/5.14/installation/prerequisites.html#solr-init-script) - -- Edit the following lines in your schema.xml (to indicate that series and its components are now `multiValued="true"`): - - `` - `` - `` - - **TODO: why are we recommending editing the schema file by hand, instead of re-running the update script?** - -- Restart Solr instance (usually service solr start, depending on solr/OS) - -### TODO: any optional upgrade steps to be added here From c3c35b6159365f506c36db4141ee22fe05138f4e Mon Sep 17 00:00:00 2001 From: Leonid Andreev Date: Thu, 29 Jun 2023 16:46:48 -0400 Subject: [PATCH 0434/1525] format #9670 --- doc/release-notes/5.14-release-notes.md | 11 ++++++----- 1 file changed, 6 insertions(+), 5 deletions(-) diff --git a/doc/release-notes/5.14-release-notes.md b/doc/release-notes/5.14-release-notes.md index c4438b5ab45..dbcac9c6792 100644 --- a/doc/release-notes/5.14-release-notes.md +++ b/doc/release-notes/5.14-release-notes.md @@ -2,7 +2,7 @@ This release brings new features, enhancements, and bug fixes to the Dataverse software. Thank you to all of the community members who contributed code, suggestions, bug reports, and other assistance across the project. -Please note that, as an experiment, the sections of this release note are organized in a different order. The Upgrade and Installation sections are at the top, with the sections highlighting new features and fixes in detail further down. +Please note that, as an experiment, the sections of this release note are organized in a different order. The Upgrade and Installation sections are at the top, with the detailed sections highlighting new features and fixes further down. ## Installation @@ -73,10 +73,11 @@ If you are running an English-only installation, you are finished with the citat - Edit the following lines in your schema.xml (to indicate that series and its components are now `multiValued="true"`): - `` - `` - `` - +``` + + + +``` **TODO: why are we recommending editing the schema file by hand, instead of re-running the update script?** - Restart Solr instance (usually service solr start, depending on solr/OS) From 831b4884c8825ad603d5ba31d925fe2ce1890520 Mon Sep 17 00:00:00 2001 From: Leonid Andreev Date: Thu, 29 Jun 2023 17:20:54 -0400 Subject: [PATCH 0435/1525] filled the "backward incompatibilities" section. #9670 --- doc/release-notes/5.14-release-notes.md | 44 ++++++++++++++----------- 1 file changed, 24 insertions(+), 20 deletions(-) diff --git a/doc/release-notes/5.14-release-notes.md b/doc/release-notes/5.14-release-notes.md index dbcac9c6792..305593343e4 100644 --- a/doc/release-notes/5.14-release-notes.md +++ b/doc/release-notes/5.14-release-notes.md @@ -1,5 +1,7 @@ # Dataverse Software 5.14 +**TODO: guides preview links need to be updated to the proper 5.14 urls before it goes live** + This release brings new features, enhancements, and bug fixes to the Dataverse software. Thank you to all of the community members who contributed code, suggestions, bug reports, and other assistance across the project. Please note that, as an experiment, the sections of this release note are organized in a different order. The Upgrade and Installation sections are at the top, with the detailed sections highlighting new features and fixes further down. 
@@ -82,13 +84,11 @@ If you are running an English-only installation, you are finished with the citat

 - Restart Solr instance (usually service solr start, depending on solr/OS)

-8\. Run ReExportAll to update dataset metadata exports
-
-Follow the directions in the [Admin Guide](http://guides.dataverse.org/en/5.14/admin/metadataexport.html#batch-exports-through-the-api)
+8\. Run ReExportAll to update dataset metadata exports. Follow the directions in the [Admin Guide](http://guides.dataverse.org/en/5.14/admin/metadataexport.html#batch-exports-through-the-api).

 ## New JVM Options and MicroProfile Config Options

-The following PID provider settings are now available.
+The following PID provider options are now available. See the section "Changes to PID Provider JVM Settings" below for more information.

@@ -126,7 +126,21 @@ The JVM/MicroProfile setting for handling of support emails ("Contact Email Impr

 ## Backward Incompatibilities

-**TODO: (L.A.)**
+The following list of potential backward incompatibilities references the sections of the "Detailed Release Highlights..." portion of the document further below, where the corresponding changes are explained in detail.
+
+### Using the new External Exporters framework
+
+Care should be taken when replacing Dataverse's internal metadata export formats, as third-party code, including other third-party Exporters, may depend on the contents of those export formats.
+
+See "Mechanism Added for Adding External Exporters".
+
+### Publishing via API
+
+When publishing a dataset via API, it now mirrors the UI behavior by requiring that the dataset has either a standard license configured, or has valid Custom Terms of Use (if allowed by the instance). Attempting to publish a dataset without such **will fail with an error message**.
+
+See "Handling of license information fixed in the API" for guidance on how to ensure that datasets created or updated via native API have a license configured.
+
+

 ## Detailed Release Highlights, New Features and Use Case Scenarios

@@ -155,11 +169,6 @@ Performance tweaks include indexing of the datasets in the background and optimi

 ### For installations using MDC (Make Data Count), it is now possible to display both the MDC metrics and the legacy access counts, generated before MDC was enabled.

 This is enabled via the new setting `:MDCStartDate` that specifies the cutoff date. If a dataset has any legacy access counts collected prior to that date, those numbers will be displayed in addition to the any MDC numbers recorded since then. (PR #6543)

-### An API endpoint for deleting a file is finally available
-
-**TODO: move under "other fixes"**
-Support for deleting files using native API: http://preview.guides.gdcc.io/en/develop/api/native-api.html#deleting-files (PR #9383)
-
 ### Changes to PID Provider JVM Settings

 In preparation for a future feature to use multiple PID providers at the same time, all JVM settings for PID providers

@@ -235,8 +244,6 @@ It is now possible for third parties to develop and share code to provide new me

 #### Backward Incompatibilities

-**TODO: add below**
-
 Care should be taken when replacing Dataverse's internal metadata export formats, as third-party code, including other third-party Exporters, may depend on the contents of those export formats.
When replacing an existing format, one must also remember to delete the cached metadata export files or run the reExport command for the metadata exports of existing datasets to be updated. #### New JVM/MicroProfile Settings @@ -245,11 +252,11 @@ dataverse.spi.export.directory - specifies a directory, readable by the Datavers ### Contact Email Improvements -Email sent from the contact forms to the contact(s) for a collection, dataset, or datafile can now optionally be cc'd to a support email address. The support email address can be changed from the default :SystemEmail address to a separate :SupportEmail address. When multiple contacts are listed, the system will now send one email to all contacts (with the optional cc if configured) instead of separate emails to each contact. Contact names with a comma that refer to Organizations will no longer have the name parts reversed in the email greeting. A new protected feedback API has been added. (PR #9204) +Email sent from the contact forms to the contact(s) for a collection, dataset, or datafile can now optionally be cc'd to a support email address. The support email address can be changed from the default :SystemEmail address to a separate :SupportEmail address. When multiple contacts are listed, the system will now send one email to all contacts (with the optional cc if configured) instead of separate emails to each contact. Contact names with a comma that refer to Organizations will no longer have the name parts reversed in the email greeting. A new protected feedback API has been added. (PR #9186) #### Backward Incompatibilities -**TODO: add to the backward incompatibilities section** +**TODO: double-check if this is really a "backward incompatibility"; if it is, add to that section** When there are multiple contacts, the system will now send one email with all of the contacts in the To: header instead of sending one email to each contact (with no indication that others have been notified). #### New JVM/MicroProfile Settings @@ -259,9 +266,9 @@ dataverse.mail.cc-support-on-contact-emails - include the support email address ### Support for Grouping Dataset Files by Folder and Category Tag -Dataverse now supports grouping dataset files by folder and/or optionally by Tag/Category. The default for whether to order by folder can be changed via :OrderByFolder. Ordering by category must be enabled by an administrator via the :CategoryOrder parameter which is used to specify which tags appear first (e.g. to put Documentation files before Data or Code files, etc.) These Group-By options work with the existing sort options, i.e. sorting alphabetically means that files within each folder or tag group will be sorted alphabetically. :AllowUsersToManageOrdering can be set to true to allow users to turn folder ordering and category ordering (if enabled) on or off in the current dataset view. +Dataverse now supports grouping dataset files by folder and/or optionally by Tag/Category. The default for whether to order by folder can be changed via :OrderByFolder. Ordering by category must be enabled by an administrator via the :CategoryOrder parameter which is used to specify which tags appear first (e.g. to put Documentation files before Data or Code files, etc.) These Group-By options work with the existing sort options, i.e. sorting alphabetically means that files within each folder or tag group will be sorted alphabetically. 
:AllowUsersToManageOrdering can be set to true to allow users to turn folder ordering and category ordering (if enabled) on or off in the current dataset view. (PR #9204)
 
-#### New Setting
+#### New Settings
 
 :CategoryOrder - a comma-separated list of Category/Tag names defining the order in which files with those tags should be displayed. The setting can include custom tag names along with the pre-defined defaults (Documentation, Data, and Code, which can be overridden by the :FileCategories setting).
 :OrderByFolder - defaults to true - whether to group files in the same folder together
@@ -311,8 +318,6 @@ For details, see #9374 (PR #9568)
 
-**TODO: add this under "backward incompatibility"**
-
 When publishing a dataset via API, it now requires the dataset to either have a standard license configured, or have valid Custom Terms of Use (if allowed by the instance). Attempting to publish a dataset without such **will fail with an error message**. This introduces a backward incompatibility, and if you have scripts that automatically create, update and publish datasets, this last step may start failing. Unfortunately, there were some problems with the dataset APIs that made it difficult to manage licenses, so an API user was likely to end up with a dataset without either of the above.
 
 In this release we have addressed it by making the following fixes:
 
 We fixed the incompatibility between the format in which license information was *exported* in json, and the format the create and update APIs were expecting for *import* (https://github.com/IQSS/dataverse/issues/9155). This means that the following json format can now be imported:
@@ -341,6 +346,7 @@ Please see the [Update Metadata For a Dataset](https://guides.dataverse.org/en/l
 
 ### Changes and fixes in this release not already mentioned above include:
 
+- An endpoint for deleting a file has been added to the native API: http://preview.guides.gdcc.io/en/develop/api/native-api.html#deleting-files (PR #9383)
 - A date column has been added to the restricted file access request overview, indicating when the earliest request by that user was made. An issue was fixed where the request list was not updated when a request was approved or rejected. (PR #9257)
 - A feature flag called "api-session-auth" has been added temporarily to aid in the development of the new frontend (#9063) but will be removed once bearer tokens (#9229) have been implemented. There is a security risk (CSRF) in enabling this flag! Do not use it in production! For more information, see http://preview.guides.gdcc.io/en/develop/installation/config.html#feature-flags
 - Changes made in v5.13 and v5.14 in multiple PRs to improve the embedded Schema.org metadata in dataset pages will only be propagated to the Schema.org JSON-LD metadata export if a reExportAll() is done. (PR #9102)
@@ -355,8 +361,6 @@ Please see the [Update Metadata For a Dataset](https://guides.dataverse.org/en/l
 - Direct upload via the Dataverse UI will now support any algorithm configured via the `:FileFixityChecksumAlgorithm` setting. External apps using the direct upload API can now query Dataverse to discover which algorithm should be used. Sites that have been using an algorithm other than MD5 and direct upload and/or dvwebloader may want to use the `/api/admin/updateHashValues` call (see https://guides.dataverse.org/en/latest/installation/config.html?highlight=updatehashvalues#filefixitychecksumalgorithm) to replace any MD5 hashes on existing files. 
(PR #9482) - - ## Complete List of Changes For the complete list of code changes in this release, see the [5.14 milestone](https://github.com/IQSS/dataverse/milestone/108?closed=1) on GitHub. From ceabc33afb10930ad12e52dc87bafdacea6ad5a1 Mon Sep 17 00:00:00 2001 From: Leonid Andreev Date: Thu, 29 Jun 2023 17:31:47 -0400 Subject: [PATCH 0436/1525] typo #9670 --- doc/release-notes/5.14-release-notes.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/release-notes/5.14-release-notes.md b/doc/release-notes/5.14-release-notes.md index 305593343e4..284080e196a 100644 --- a/doc/release-notes/5.14-release-notes.md +++ b/doc/release-notes/5.14-release-notes.md @@ -167,7 +167,7 @@ Performance tweaks include indexing of the datasets in the background and optimi ### For installations using MDC (Make Data Count), it is now possible to display both the MDC metrics and the legacy access counts, generated before MDC was enabled. -This is enabled via the new setting `:MDCStartDate` that specifies the cutoff date. If a dataset has any legacy access counts collected prior to that date, those numbers will be displayed in addition to the any MDC numbers recorded since then. (PR #6543) +This is enabled via the new setting `:MDCStartDate` that specifies the cutoff date. If a dataset has any legacy access counts collected prior to that date, those numbers will be displayed in addition to any MDC numbers recorded since then. (PR #6543) ### Changes to PID Provider JVM Settings From f49e2de330c101de9b04a2752613a459d0980f67 Mon Sep 17 00:00:00 2001 From: Leonid Andreev Date: Fri, 30 Jun 2023 14:25:16 -0400 Subject: [PATCH 0437/1525] solr schema instructions (#9670) --- doc/release-notes/5.14-release-notes.md | 12 +++++++++--- 1 file changed, 9 insertions(+), 3 deletions(-) diff --git a/doc/release-notes/5.14-release-notes.md b/doc/release-notes/5.14-release-notes.md index 284080e196a..1feaa4f6cd2 100644 --- a/doc/release-notes/5.14-release-notes.md +++ b/doc/release-notes/5.14-release-notes.md @@ -2,6 +2,8 @@ **TODO: guides preview links need to be updated to the proper 5.14 urls before it goes live** +(if this note appears truncated on the GitHub Releases page, you can view it in full in the source tree: https://github.com/IQSS/dataverse/blob/master/doc/release-notes/5.14-release-notes.md) + This release brings new features, enhancements, and bug fixes to the Dataverse software. Thank you to all of the community members who contributed code, suggestions, bug reports, and other assistance across the project. Please note that, as an experiment, the sections of this release note are organized in a different order. The Upgrade and Installation sections are at the top, with the detailed sections highlighting new features and fixes further down. 
@@ -73,14 +75,18 @@ If you are running an English-only installation, you are finished with the citat - Stop solr instance (usually service solr stop, depending on solr installation/OS, see the [Installation Guide](https://guides.dataverse.org/en/5.14/installation/prerequisites.html#solr-init-script) -- Edit the following lines in your schema.xml (to indicate that series and its components are now `multiValued="true"`): - +- There are 2 ways to regenerate the schema: By collecting the output of the Dataverse fields API and feeding it to the `update-fields.sh` script that we supply, as in the example below (modify the command lines as needed): +``` + wget https://raw.githubusercontent.com/IQSS/dataverse/master/conf/solr/8.11.1/update-fields.sh + chmod +x update-fields.sh + curl "http://localhost:8080/api/admin/index/solr/schema" | ./update-fields.sh /usr/local/solr/solr-8.8.1/server/solr/collection1/conf/schema.xml +``` + OR, alternatively, you can edit the following lines in your schema.xml by hand as follows (to indicate that series and its components are now multiValued="true"): ``` ``` - **TODO: why are we recommending editing the schema file by hand, instead of re-running the update script?** - Restart Solr instance (usually service solr start, depending on solr/OS) From b53ad5d1a88b41a65f7f122b42e8602b186bc4eb Mon Sep 17 00:00:00 2001 From: Leonid Andreev Date: Fri, 30 Jun 2023 14:30:33 -0400 Subject: [PATCH 0438/1525] cosmetic (#9670) --- doc/release-notes/5.14-release-notes.md | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/doc/release-notes/5.14-release-notes.md b/doc/release-notes/5.14-release-notes.md index 1feaa4f6cd2..eb7cf55bb23 100644 --- a/doc/release-notes/5.14-release-notes.md +++ b/doc/release-notes/5.14-release-notes.md @@ -63,7 +63,7 @@ If you are running an English-only installation, you are finished with the citat 7a\. For installations without custom or experimental metadata blocks: -- Stop Solr instance (usually service solr stop, depending on Solr installation/OS, see the [Installation Guide](https://guides.dataverse.org/en/5.14/installation/prerequisites.html#solr-init-script) +- Stop Solr instance (usually service solr stop, depending on Solr installation/OS, see the [Installation Guide](https://guides.dataverse.org/en/5.14/installation/prerequisites.html#solr-init-script)) - Replace schema.xml @@ -73,15 +73,15 @@ If you are running an English-only installation, you are finished with the citat 7b\. 
For installations with custom or experimental metadata blocks:
 
-- Stop solr instance (usually service solr stop, depending on solr installation/OS, see the [Installation Guide](https://guides.dataverse.org/en/5.14/installation/prerequisites.html#solr-init-script)
+- Stop solr instance (usually service solr stop, depending on solr installation/OS, see the [Installation Guide](https://guides.dataverse.org/en/5.14/installation/prerequisites.html#solr-init-script))
 
-- There are 2 ways to regenerate the schema: By collecting the output of the Dataverse fields API and feeding it to the `update-fields.sh` script that we supply, as in the example below (modify the command lines as needed):
+- There are 2 ways to regenerate the schema: Either by collecting the output of the Dataverse schema API and feeding it to the `update-fields.sh` script that we supply, as in the example below (modify the command lines as needed):
 ```
  wget https://raw.githubusercontent.com/IQSS/dataverse/master/conf/solr/8.11.1/update-fields.sh
  chmod +x update-fields.sh
  curl "http://localhost:8080/api/admin/index/solr/schema" | ./update-fields.sh /usr/local/solr/solr-8.8.1/server/solr/collection1/conf/schema.xml
 ```
- OR, alternatively, you can edit the following lines in your schema.xml by hand as follows (to indicate that series and its components are now multiValued="true"):
+OR, alternatively, you can edit the following lines in your schema.xml by hand as follows (to indicate that series and its components are now multiValued="true"):
 ```

From ec83cc224b3cbb32d8e299f459f56f0e88240425 Mon Sep 17 00:00:00 2001
From: Leonid Andreev
Date: Fri, 30 Jun 2023 14:36:01 -0400
Subject: [PATCH 0439/1525] cosmetic. (#9670)

---
 doc/release-notes/5.14-release-notes.md | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/doc/release-notes/5.14-release-notes.md b/doc/release-notes/5.14-release-notes.md
index eb7cf55bb23..442338b248b 100644
--- a/doc/release-notes/5.14-release-notes.md
+++ b/doc/release-notes/5.14-release-notes.md
@@ -1,6 +1,6 @@
 # Dataverse Software 5.14
 
-**TODO: guides preview links need to be updated to the proper 5.14 urls before it goes live**
+**TODO: guides preview links need to be changed to point to the permanent `https://guides.dataverse.org/en/5.14/` locations before the release goes live**
 
 (if this note appears truncated on the GitHub Releases page, you can view it in full in the source tree: https://github.com/IQSS/dataverse/blob/master/doc/release-notes/5.14-release-notes.md)
 
@@ -59,7 +59,7 @@ If you are running an English-only installation, you are finished with the citat
 - `wget https://github.com/IQSS/dataverse/releases/download/v5.14/citation.properties`
 - `cp citation.properties /home/dataverse/langBundles`
 
-7\. Replace Solr schema.xml to allow multiple series to be used. See specific instructions below for those installations without custom metadata blocks (1a) and those with custom metadata blocks (1b).
+7\. Update Solr schema.xml to allow multiple series to be used. See specific instructions below for those installations without custom metadata blocks (7a) and those with custom metadata blocks (7b).
 
 7a\. 
For installations without custom or experimental metadata blocks:

From 55316ec06d44f15660a34694358d05f8c805ffee Mon Sep 17 00:00:00 2001
From: Leonid Andreev
Date: Fri, 30 Jun 2023 14:38:16 -0400
Subject: [PATCH 0440/1525] cosmetic (#9670)

---
 doc/release-notes/5.14-release-notes.md | 3 +--
 1 file changed, 1 insertion(+), 2 deletions(-)

diff --git a/doc/release-notes/5.14-release-notes.md b/doc/release-notes/5.14-release-notes.md
index 442338b248b..f95fb1d5f99 100644
--- a/doc/release-notes/5.14-release-notes.md
+++ b/doc/release-notes/5.14-release-notes.md
@@ -28,8 +28,7 @@ In the following commands we assume that Payara 5 is installed in `/usr/local/pa
 
 1\. Undeploy the previous version.
 
-- `$PAYARA/bin/asadmin list-applications`
-- `$PAYARA/bin/asadmin undeploy dataverse<-version>`
+- `$PAYARA/bin/asadmin undeploy dataverse-5.13`
 
 2\. Stop Payara and remove the generated directory

From 85d22d9457dd4b4afd476a555f124c03cd5dbcc3 Mon Sep 17 00:00:00 2001
From: Leonid Andreev
Date: Fri, 30 Jun 2023 17:53:22 -0400
Subject: [PATCH 0441/1525] formatting (#9670)

---
 doc/release-notes/5.14-release-notes.md | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/doc/release-notes/5.14-release-notes.md b/doc/release-notes/5.14-release-notes.md
index f95fb1d5f99..9e5b326f63e 100644
--- a/doc/release-notes/5.14-release-notes.md
+++ b/doc/release-notes/5.14-release-notes.md
@@ -80,7 +80,7 @@ If you are running an English-only installation, you are finished with the citat
  chmod +x update-fields.sh
  curl "http://localhost:8080/api/admin/index/solr/schema" | ./update-fields.sh /usr/local/solr/solr-8.8.1/server/solr/collection1/conf/schema.xml
 ```
-OR, alternatively, you can edit the following lines in your schema.xml by hand as follows (to indicate that series and its components are now multiValued="true"):
+OR, alternatively, you can edit the following lines in your schema.xml by hand as follows (to indicate that series and its components are now `multiValued="true"`):
 ```

From a7b428862b42d0b69a0054fcb00ec40bcc6b51f8 Mon Sep 17 00:00:00 2001
From: Kim Jin Yeon
Date: Mon, 3 Jul 2023 12:43:57 +0900
Subject: [PATCH 0442/1525] Add description on Basic File Access about FilePIDsEnabled

---
 doc/sphinx-guides/source/api/dataaccess.rst | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/doc/sphinx-guides/source/api/dataaccess.rst b/doc/sphinx-guides/source/api/dataaccess.rst
index e76ea167587..98df2d5a046 100755
--- a/doc/sphinx-guides/source/api/dataaccess.rst
+++ b/doc/sphinx-guides/source/api/dataaccess.rst
@@ -83,7 +83,7 @@ Basic access
 
 URI: ``/api/access/datafile/$id``
 
-.. note:: Files can be accessed using persistent identifiers. This is done by passing the constant ``:persistentId`` where the numeric id of the file is expected, and then passing the actual persistent id as a query parameter with the name ``persistentId``.
+.. note:: Files can be accessed using persistent identifiers. This is done by passing the constant ``:persistentId`` where the numeric id of the file is expected, and then passing the actual persistent id as a query parameter with the name ``persistentId``. 
(TODO: mention that this only works when the FilePIDsEnabled option is enabled, and add a link to the FilePIDsEnabled documentation)
 
 Example: Getting the file whose DOI is *10.5072/FK2/J8SJZB* ::
 
     GET http://$SERVER/api/access/datafile/:persistentId?persistentId=doi:10.5072/FK2/J8SJZB

From e3fd94502d4bc60541cd233847e92367d4be054d Mon Sep 17 00:00:00 2001
From: Minji Woo
Date: Mon, 3 Jul 2023 13:05:52 +0900
Subject: [PATCH 0443/1525] Update dataaccess.rst file access

General description of the basic file access: FilePIDsEnabled
---
 doc/sphinx-guides/source/api/dataaccess.rst | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/doc/sphinx-guides/source/api/dataaccess.rst b/doc/sphinx-guides/source/api/dataaccess.rst
index 98df2d5a046..2ecdc0d2a23 100755
--- a/doc/sphinx-guides/source/api/dataaccess.rst
+++ b/doc/sphinx-guides/source/api/dataaccess.rst
@@ -83,7 +83,7 @@ Basic access
 
 URI: ``/api/access/datafile/$id``
 
-.. note:: Files can be accessed using persistent identifiers. This is done by passing the constant ``:persistentId`` where the numeric id of the file is expected, and then passing the actual persistent id as a query parameter with the name ``persistentId``. (TODO: mention that this only works when the FilePIDsEnabled option is enabled, and add a link to the FilePIDsEnabled documentation)
+.. note:: Files can be accessed using persistent identifiers. This is done by passing the constant ``:persistentId`` where the numeric id of the file is expected, and then passing the actual persistent id as a query parameter with the name ``persistentId``. However, this file access method is only effective when the FilePIDsEnabled option is enabled, which can be authorized by the admin.
 
 Example: Getting the file whose DOI is *10.5072/FK2/J8SJZB* ::
 
     GET http://$SERVER/api/access/datafile/:persistentId?persistentId=doi:10.5072/FK2/J8SJZB

From 89353d4fb8135b5f957cea799bc5347c6ae9c556 Mon Sep 17 00:00:00 2001
From: kmina02 <79454352+kmina02@users.noreply.github.com>
Date: Mon, 3 Jul 2023 13:12:14 +0900
Subject: [PATCH 0444/1525] Update dataaccess.rst

put further link about FilePIDsEnabled
---
 doc/sphinx-guides/source/api/dataaccess.rst | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/doc/sphinx-guides/source/api/dataaccess.rst b/doc/sphinx-guides/source/api/dataaccess.rst
index 2ecdc0d2a23..15a93ab436a 100755
--- a/doc/sphinx-guides/source/api/dataaccess.rst
+++ b/doc/sphinx-guides/source/api/dataaccess.rst
@@ -84,7 +84,7 @@ Basic access
 URI: ``/api/access/datafile/$id``
 
 .. note:: Files can be accessed using persistent identifiers. This is done by passing the constant ``:persistentId`` where the numeric id of the file is expected, and then passing the actual persistent id as a query parameter with the name ``persistentId``. However, this file access method is only effective when the FilePIDsEnabled option is enabled, which can be authorized by the admin.
-
+For further information, refer to https://guides.dataverse.org/en/5.13/installation/config.html?highlight=filepidsenabled#id255
 Example: Getting the file whose DOI is *10.5072/FK2/J8SJZB* ::
 
     GET http://$SERVER/api/access/datafile/:persistentId?persistentId=doi:10.5072/FK2/J8SJZB

From ca796d785b3470805c98adeb6ed86e843c5059b4 Mon Sep 17 00:00:00 2001
From: Kim Jin Yeon
Date: Mon, 3 Jul 2023 13:22:48 +0900
Subject: [PATCH 0445/1525] Update dataaccess.rst

---
 doc/sphinx-guides/source/api/dataaccess.rst | 3 +--
 1 file changed, 1 insertion(+), 2 deletions(-)

diff --git a/doc/sphinx-guides/source/api/dataaccess.rst b/doc/sphinx-guides/source/api/dataaccess.rst
index 15a93ab436a..e4cde143d29 100755
--- a/doc/sphinx-guides/source/api/dataaccess.rst
+++ b/doc/sphinx-guides/source/api/dataaccess.rst
@@ -83,8 +83,7 @@ Basic access
 
 URI: ``/api/access/datafile/$id``
 
-.. 
note:: Files can be accessed using persistent identifiers. This is done by passing the constant ``:persistentId`` where the numeric id of the file is expected, and then passing the actual persistent id as a query parameter with the name ``persistentId``. However, this file access method is only effective when the FilePIDsEnabled option is enabled, which can be authorized by the admin. -For further information, refer to https://guides.dataverse.org/en/5.13/installation/config.html?highlight=filepidsenabled#id255 +.. note:: Files can be accessed using persistent identifiers. This is done by passing the constant ``:persistentId`` where the numeric id of the file is expected, and then passing the actual persistent id as a query parameter with the name ``persistentId``. However, this file access method is only effective when the FilePIDsEnabled option is enabled, which can be authorized by the admin. For further information, refer to https://guides.dataverse.org/en/latest/installation/config.html?highlight=pidsenabled Example: Getting the file whose DOI is *10.5072/FK2/J8SJZB* :: GET http://$SERVER/api/access/datafile/:persistentId?persistentId=doi:10.5072/FK2/J8SJZB From 20e76599b78b5d77dee89784e99241b4095850b2 Mon Sep 17 00:00:00 2001 From: Kim Jin Yeon Date: Mon, 3 Jul 2023 14:06:03 +0900 Subject: [PATCH 0446/1525] Update dataaccess.rst --- doc/sphinx-guides/source/api/dataaccess.rst | 1 + 1 file changed, 1 insertion(+) diff --git a/doc/sphinx-guides/source/api/dataaccess.rst b/doc/sphinx-guides/source/api/dataaccess.rst index e4cde143d29..99e7f7eca92 100755 --- a/doc/sphinx-guides/source/api/dataaccess.rst +++ b/doc/sphinx-guides/source/api/dataaccess.rst @@ -84,6 +84,7 @@ Basic access URI: ``/api/access/datafile/$id`` .. note:: Files can be accessed using persistent identifiers. This is done by passing the constant ``:persistentId`` where the numeric id of the file is expected, and then passing the actual persistent id as a query parameter with the name ``persistentId``. However, this file access method is only effective when the FilePIDsEnabled option is enabled, which can be authorized by the admin. For further information, refer to https://guides.dataverse.org/en/latest/installation/config.html?highlight=pidsenabled + Example: Getting the file whose DOI is *10.5072/FK2/J8SJZB* :: GET http://$SERVER/api/access/datafile/:persistentId?persistentId=doi:10.5072/FK2/J8SJZB From 17290efa53ac7b692a1d851b0d4e10a7d4cd8aea Mon Sep 17 00:00:00 2001 From: Kim Jin Yeon Date: Mon, 3 Jul 2023 14:12:33 +0900 Subject: [PATCH 0447/1525] Update dataaccess.rst --- doc/sphinx-guides/source/api/dataaccess.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/sphinx-guides/source/api/dataaccess.rst b/doc/sphinx-guides/source/api/dataaccess.rst index 99e7f7eca92..7b1feed7814 100755 --- a/doc/sphinx-guides/source/api/dataaccess.rst +++ b/doc/sphinx-guides/source/api/dataaccess.rst @@ -83,7 +83,7 @@ Basic access URI: ``/api/access/datafile/$id`` -.. note:: Files can be accessed using persistent identifiers. This is done by passing the constant ``:persistentId`` where the numeric id of the file is expected, and then passing the actual persistent id as a query parameter with the name ``persistentId``. However, this file access method is only effective when the FilePIDsEnabled option is enabled, which can be authorized by the admin. For further information, refer to https://guides.dataverse.org/en/latest/installation/config.html?highlight=pidsenabled +.. 
note:: Files can be accessed using persistent identifiers. This is done by passing the constant ``:persistentId`` where the numeric id of the file is expected, and then passing the actual persistent id as a query parameter with the name ``persistentId``. However, this file access method is only effective when the FilePIDsEnabled option is enabled, which can be authorized by the admin. For further information, refer to `FilePIDsEnabled `_
 
 Example: Getting the file whose DOI is *10.5072/FK2/J8SJZB* ::
 

From 98fe06d0cecf3022a6c50ecfc110753105894d76 Mon Sep 17 00:00:00 2001
From: Eryk Kulikowski
Date: Mon, 3 Jul 2023 17:11:56 +0200
Subject: [PATCH 0448/1525] added info on go (golang) library

---
 doc/sphinx-guides/source/api/client-libraries.rst | 4 ++++
 1 file changed, 4 insertions(+)

diff --git a/doc/sphinx-guides/source/api/client-libraries.rst b/doc/sphinx-guides/source/api/client-libraries.rst
index 9efcf7eb153..c8c38827961 100755
--- a/doc/sphinx-guides/source/api/client-libraries.rst
+++ b/doc/sphinx-guides/source/api/client-libraries.rst
@@ -69,3 +69,7 @@ Ruby
 https://github.com/libis/dataverse_api is a Ruby gem for Dataverse APIs. It is registered as a library on Rubygems (https://rubygems.org/search?query=dataverse).
 
 The gem is created and maintained by the LIBIS team (https://www.libis.be) at the University of Leuven (https://www.kuleuven.be).
+
+Go
+--
+https://github.com/libis/rdm-dataverse-go-api is a Go API library that can be used in your project by simply adding it to your `go.mod` file as a `github.com/libis/rdm-dataverse-go-api` dependency. See also the GitHub page for more details and usage examples.

From 73c3235c2442e285274ed7857098eaebdd0297d6 Mon Sep 17 00:00:00 2001
From: GPortas
Date: Tue, 4 Jul 2023 10:39:31 +0100
Subject: [PATCH 0449/1525] Stash: getVersionFiles API extension with pagination and order criteria WIP

---
 .../dataverse/DatasetVersionServiceBean.java  | 76 ++++++++++++++++++-
 .../harvard/iq/dataverse/api/Datasets.java    | 12 ++-
 2 files changed, 83 insertions(+), 5 deletions(-)

diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetVersionServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DatasetVersionServiceBean.java
index 9f272ec6877..5100f9d26eb 100644
--- a/src/main/java/edu/harvard/iq/dataverse/DatasetVersionServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/DatasetVersionServiceBean.java
@@ -49,7 +49,21 @@ public class DatasetVersionServiceBean implements java.io.Serializable {
     private static final Logger logger = Logger.getLogger(DatasetVersionServiceBean.class.getCanonicalName());
     private static final SimpleDateFormat logFormatter = new SimpleDateFormat("yyyy-MM-dd'T'HH-mm-ss");
-    
+
+    private static final String QUERY_STR_FIND_ALL_FILE_METADATAS_ORDER_BY_LABEL = "SELECT fm FROM FileMetadata fm WHERE fm.datasetVersion.id=:datasetVersionId ORDER BY fm.label";
+    private static final String QUERY_STR_FIND_ALL_FILE_METADATAS_ORDER_BY_DATE = "SELECT fm FROM FileMetadata fm, DvObject dvo"
+            + " WHERE fm.datasetVersion.id = :datasetVersionId"
+            + " AND fm.dataFile.id = dvo.id"
+            + " ORDER BY CASE WHEN dvo.publicationDate IS NOT NULL THEN dvo.publicationDate ELSE dvo.createDate END";
+    private static final String QUERY_STR_FIND_ALL_FILE_METADATAS_ORDER_BY_SIZE = "SELECT fm FROM FileMetadata fm, DataFile df"
+            + " WHERE fm.datasetVersion.id = :datasetVersionId"
+            + " AND fm.dataFile.id = df.id"
+            + " ORDER BY df.filesize";
+    private static final String QUERY_STR_FIND_ALL_FILE_METADATAS_ORDER_BY_TYPE = "SELECT fm FROM FileMetadata fm, DataFile 
df" + + " WHERE fm.datasetVersion.id = :datasetVersionId" + + " AND fm.dataFile.id = df.id" + + " ORDER BY df.contentType"; + @EJB DatasetServiceBean datasetService; @@ -150,7 +164,19 @@ public DatasetVersion getDatasetVersion(){ return this.datasetVersionForResponse; } } // end RetrieveDatasetVersionResponse - + + /** + * Different criteria to sort the results of FileMetadata queries used in {@link DatasetVersionServiceBean#getFileMetadatas(DatasetVersion, Integer, Integer, FileMetadatasOrderCriteria)} + */ + public enum FileMetadatasOrderCriteria { + NameAZ, + NameZA, + Newest, + Oldest, + Size, + Type + } + public DatasetVersion find(Object pk) { return em.find(DatasetVersion.class, pk); } @@ -1210,4 +1236,50 @@ public List getUnarchivedDatasetVersions(){ return null; } } // end getUnarchivedDatasetVersions + + /** + * Returns a FileMetadata list of files in the specified DatasetVersion + * + * @param datasetVersion the DatasetVersion to access + * @param limit for pagination, can be null + * @param offset for pagination, can be null + * @param orderCriteria a FileMetadatasOrderCriteria to order the results + * @return a FileMetadata list of the specified DatasetVersion + */ + public List getFileMetadatas(DatasetVersion datasetVersion, Integer limit, Integer offset, FileMetadatasOrderCriteria orderCriteria) { + Query query = em.createQuery(getQueryStringFromFileMetadatasOrderCriteria(orderCriteria)) + .setParameter("datasetVersionId", datasetVersion.getId()); + if (limit != null) { + query.setMaxResults(limit); + } + if (offset != null) { + query.setFirstResult(offset); + } + return query.getResultList(); + } + + private String getQueryStringFromFileMetadatasOrderCriteria(FileMetadatasOrderCriteria orderCriteria) { + String queryString; + switch (orderCriteria) { + case NameZA: + queryString = QUERY_STR_FIND_ALL_FILE_METADATAS_ORDER_BY_LABEL + " DESC"; + break; + case Newest: + queryString = QUERY_STR_FIND_ALL_FILE_METADATAS_ORDER_BY_DATE + " DESC"; + break; + case Oldest: + queryString = QUERY_STR_FIND_ALL_FILE_METADATAS_ORDER_BY_DATE; + break; + case Size: + queryString = QUERY_STR_FIND_ALL_FILE_METADATAS_ORDER_BY_SIZE; + break; + case Type: + queryString = QUERY_STR_FIND_ALL_FILE_METADATAS_ORDER_BY_TYPE; + break; + default: + queryString = QUERY_STR_FIND_ALL_FILE_METADATAS_ORDER_BY_LABEL; + break; + } + return queryString; + } } // end class diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java index 8c1390b597e..5aef4302631 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java @@ -493,9 +493,15 @@ public Response getVersion(@Context ContainerRequestContext crc, @PathParam("id" @GET @AuthRequired @Path("{id}/versions/{versionId}/files") - public Response getVersionFiles(@Context ContainerRequestContext crc, @PathParam("id") String datasetId, @PathParam("versionId") String versionId, @Context UriInfo uriInfo, @Context HttpHeaders headers) { - return response( req -> ok( jsonFileMetadatas( - getDatasetVersionOrDie(req, versionId, findDatasetOrDie(datasetId), uriInfo, headers).getFileMetadatas())), getRequestUser(crc)); + public Response getVersionFiles(@Context ContainerRequestContext crc, @PathParam("id") String datasetId, @PathParam("versionId") String versionId, @QueryParam("limit") Integer limit, @QueryParam("offset") Integer offset, @QueryParam("orderCriteria") String orderCriteria, @Context UriInfo uriInfo, @Context HttpHeaders 
headers) { + return response( req -> { + DatasetVersion datasetVersion = getDatasetVersionOrDie(req, versionId, findDatasetOrDie(datasetId), uriInfo, headers); + try { + return ok(jsonFileMetadatas(datasetversionService.getFileMetadatas(datasetVersion, limit, offset, orderCriteria != null ? DatasetVersionServiceBean.FileMetadatasOrderCriteria.valueOf(orderCriteria) : DatasetVersionServiceBean.FileMetadatasOrderCriteria.NameAZ))); + } catch (IllegalArgumentException e) { + return error(Response.Status.BAD_REQUEST, "Invalid order criteria: " + orderCriteria); + } + }, getRequestUser(crc)); } @GET From dc31788c09eb168565ea84c04e4cf9ed9ccd1387 Mon Sep 17 00:00:00 2001 From: GPortas Date: Tue, 4 Jul 2023 11:43:58 +0100 Subject: [PATCH 0450/1525] Stash: IT tests for getVersionFiles API endpoint WIP --- .../dataverse/DatasetVersionServiceBean.java | 2 +- .../harvard/iq/dataverse/api/DatasetsIT.java | 32 +++++++++++++++++++ .../edu/harvard/iq/dataverse/api/UtilIT.java | 31 +++++++++++++++--- 3 files changed, 60 insertions(+), 5 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetVersionServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DatasetVersionServiceBean.java index 5100f9d26eb..45edfb01777 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetVersionServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetVersionServiceBean.java @@ -166,7 +166,7 @@ public DatasetVersion getDatasetVersion(){ } // end RetrieveDatasetVersionResponse /** - * Different criteria to sort the results of FileMetadata queries used in {@link DatasetVersionServiceBean#getFileMetadatas(DatasetVersion, Integer, Integer, FileMetadatasOrderCriteria)} + * Different criteria to sort the results of FileMetadata queries used in {@link DatasetVersionServiceBean#getFileMetadatas} */ public enum FileMetadatasOrderCriteria { NameAZ, diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java index 687ab453d24..973818776f3 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java @@ -9,6 +9,7 @@ import java.util.logging.Logger; +import edu.harvard.iq.dataverse.DatasetVersionServiceBean; import org.junit.BeforeClass; import org.junit.Test; import org.skyscreamer.jsonassert.JSONAssert; @@ -3202,4 +3203,35 @@ public void getDatasetVersionCitation() { // We check that the returned message contains information expected for the citation string .body("data.message", containsString("DRAFT VERSION")); } + + @Test + public void getVersionFiles() throws IOException { + Response createUser = UtilIT.createRandomUser(); + createUser.then().assertThat().statusCode(OK.getStatusCode()); + String apiToken = UtilIT.getApiTokenFromResponse(createUser); + + Response createDataverseResponse = UtilIT.createRandomDataverse(apiToken); + createDataverseResponse.then().assertThat().statusCode(CREATED.getStatusCode()); + String dataverseAlias = UtilIT.getAliasFromResponse(createDataverseResponse); + + Response createDatasetResponse = UtilIT.createRandomDatasetViaNativeApi(dataverseAlias, apiToken); + createDatasetResponse.then().assertThat().statusCode(CREATED.getStatusCode()); + String datasetPersistentId = JsonPath.from(createDatasetResponse.body().asString()).getString("data.persistentId"); + Integer datasetId = JsonPath.from(createDatasetResponse.body().asString()).getInt("data.id"); + + String testFileName1 = "test_1.txt"; + String testFileName2 = "test_2.txt"; 
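+        // Distinct file names (and, below, distinct upload sizes) make it possible to
+        // tell the files apart when asserting on pagination and ordering of the results.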
+ String testFileName3 = "test_3.txt"; + String testFileName4 = "test_4.txt"; + + UtilIT.createAndUploadTestFile(datasetPersistentId, testFileName1, new byte[50], apiToken); + UtilIT.createAndUploadTestFile(datasetPersistentId, testFileName2, new byte[100], apiToken); + UtilIT.createAndUploadTestFile(datasetPersistentId, testFileName3, new byte[200], apiToken); + UtilIT.createAndUploadTestFile(datasetPersistentId, testFileName4, new byte[300], apiToken); + + Response getVersionFilesResponseOrderNameAZ = UtilIT.getVersionFiles(datasetId, ":latest", 2, 0, DatasetVersionServiceBean.FileMetadatasOrderCriteria.NameAZ.toString(), apiToken); + getVersionFilesResponseOrderNameAZ.prettyPrint(); + + // TODO + } } diff --git a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java index 6e24d0a0ecb..e0f648e48aa 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java @@ -4,6 +4,8 @@ import com.jayway.restassured.http.ContentType; import com.jayway.restassured.path.json.JsonPath; import com.jayway.restassured.response.Response; + +import java.io.*; import java.util.UUID; import java.util.logging.Logger; import javax.json.Json; @@ -12,8 +14,6 @@ import javax.json.JsonObject; import javax.ws.rs.client.Client; import javax.ws.rs.client.ClientBuilder; -import java.io.File; -import java.io.IOException; import java.nio.charset.StandardCharsets; import java.nio.file.Files; import java.nio.file.Paths; @@ -30,7 +30,6 @@ import com.mashape.unirest.http.Unirest; import com.mashape.unirest.http.exceptions.UnirestException; import com.mashape.unirest.request.GetRequest; -import java.io.InputStream; import edu.harvard.iq.dataverse.util.FileUtil; import java.util.Base64; import org.apache.commons.io.IOUtils; @@ -49,8 +48,11 @@ import edu.harvard.iq.dataverse.DatasetFieldType; import edu.harvard.iq.dataverse.DatasetFieldValue; import edu.harvard.iq.dataverse.util.StringUtil; -import java.io.StringReader; + import java.util.Collections; +import java.util.zip.ZipEntry; +import java.util.zip.ZipOutputStream; + import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertNull; @@ -3240,4 +3242,25 @@ static Response getDatasetVersionCitation(Integer datasetId, String version, Str .get("/api/datasets/" + datasetId + "/versions/" + version + "/citation"); return response; } + + static Response getVersionFiles(Integer datasetId, String version, int limit, int offset, String orderCriteria, String apiToken) { + Response response = given() + .header(API_TOKEN_HTTP_HEADER, apiToken) + .contentType("application/json") + .get("/api/datasets/" + datasetId + "/versions/" + version + "/files?limit=" + limit + "&offset=" + offset + "&orderCriteria=" + orderCriteria); + return response; + } + + static Response createAndUploadTestFile(String persistentId, String testFileName, byte[] testFileContentInBytes, String apiToken) throws IOException { + Path pathToTempDir = Paths.get(Files.createTempDirectory(null).toString()); + String pathToTestFile = pathToTempDir + File.separator + testFileName; + File testFile = new File(pathToTestFile); + FileOutputStream fileOutputStream = new FileOutputStream(testFile); + + fileOutputStream.write(testFileContentInBytes); + fileOutputStream.flush(); + fileOutputStream.close(); + + return uploadZipFileViaSword(persistentId, pathToTestFile, apiToken); + } } From fff211910e4a060f873e8949e3ea2d9653ea46d5 Mon Sep 17 
00:00:00 2001 From: GPortas Date: Tue, 4 Jul 2023 21:44:45 +0100 Subject: [PATCH 0451/1525] Added: IT tests for getVersionFiles API endpoint --- .../harvard/iq/dataverse/api/DatasetsIT.java | 102 ++++++++++++++++-- .../edu/harvard/iq/dataverse/api/UtilIT.java | 18 +++- 2 files changed, 108 insertions(+), 12 deletions(-) diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java index 973818776f3..e994536f03a 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java @@ -94,7 +94,6 @@ import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; -import static org.junit.Assert.assertFalse; public class DatasetsIT { @@ -3223,15 +3222,104 @@ public void getVersionFiles() throws IOException { String testFileName2 = "test_2.txt"; String testFileName3 = "test_3.txt"; String testFileName4 = "test_4.txt"; + String testFileName5 = "test_5.png"; UtilIT.createAndUploadTestFile(datasetPersistentId, testFileName1, new byte[50], apiToken); - UtilIT.createAndUploadTestFile(datasetPersistentId, testFileName2, new byte[100], apiToken); - UtilIT.createAndUploadTestFile(datasetPersistentId, testFileName3, new byte[200], apiToken); - UtilIT.createAndUploadTestFile(datasetPersistentId, testFileName4, new byte[300], apiToken); + UtilIT.createAndUploadTestFile(datasetPersistentId, testFileName2, new byte[200], apiToken); + UtilIT.createAndUploadTestFile(datasetPersistentId, testFileName3, new byte[100], apiToken); + UtilIT.createAndUploadTestFile(datasetPersistentId, testFileName5, new byte[300], apiToken); + UtilIT.createAndUploadTestFile(datasetPersistentId, testFileName4, new byte[400], apiToken); - Response getVersionFilesResponseOrderNameAZ = UtilIT.getVersionFiles(datasetId, ":latest", 2, 0, DatasetVersionServiceBean.FileMetadatasOrderCriteria.NameAZ.toString(), apiToken); - getVersionFilesResponseOrderNameAZ.prettyPrint(); + String testDatasetVersion = ":latest"; - // TODO + // Test pagination and NameAZ order criteria (the default criteria) + int testPageSize = 2; + + // Test page 1 + Response getVersionFilesResponsePaginated = UtilIT.getVersionFiles(datasetId, testDatasetVersion, testPageSize, null, null, apiToken); + + int fileMetadatasCount = getVersionFilesResponsePaginated.jsonPath().getList("data").size(); + assertEquals(testPageSize, fileMetadatasCount); + + getVersionFilesResponsePaginated.then().assertThat() + .statusCode(OK.getStatusCode()) + .body("data[0].label", equalTo(testFileName1)) + .body("data[1].label", equalTo(testFileName2)); + + // Test page 2 + getVersionFilesResponsePaginated = UtilIT.getVersionFiles(datasetId, testDatasetVersion, testPageSize, testPageSize, null, apiToken); + + fileMetadatasCount = getVersionFilesResponsePaginated.jsonPath().getList("data").size(); + assertEquals(testPageSize, fileMetadatasCount); + + getVersionFilesResponsePaginated.then().assertThat() + .statusCode(OK.getStatusCode()) + .body("data[0].label", equalTo(testFileName3)) + .body("data[1].label", equalTo(testFileName4)); + + // Test page 3 (last) + getVersionFilesResponsePaginated = UtilIT.getVersionFiles(datasetId, testDatasetVersion, testPageSize, testPageSize * 2, null, apiToken); + + fileMetadatasCount = getVersionFilesResponsePaginated.jsonPath().getList("data").size(); + assertEquals(1, fileMetadatasCount); + + getVersionFilesResponsePaginated.then().assertThat() + 
.statusCode(OK.getStatusCode()) + .body("data[0].label", equalTo(testFileName5)); + + // Test NameZA order criteria + Response getVersionFilesResponseNameZACriteria = UtilIT.getVersionFiles(datasetId, testDatasetVersion, null, null, DatasetVersionServiceBean.FileMetadatasOrderCriteria.NameZA.toString(), apiToken); + + getVersionFilesResponseNameZACriteria.then().assertThat() + .statusCode(OK.getStatusCode()) + .body("data[0].label", equalTo(testFileName5)) + .body("data[1].label", equalTo(testFileName4)) + .body("data[2].label", equalTo(testFileName3)) + .body("data[3].label", equalTo(testFileName2)) + .body("data[4].label", equalTo(testFileName1)); + + // Test Newest order criteria + Response getVersionFilesResponseNewestCriteria = UtilIT.getVersionFiles(datasetId, testDatasetVersion, null, null, DatasetVersionServiceBean.FileMetadatasOrderCriteria.Newest.toString(), apiToken); + + getVersionFilesResponseNewestCriteria.then().assertThat() + .statusCode(OK.getStatusCode()) + .body("data[0].label", equalTo(testFileName4)) + .body("data[1].label", equalTo(testFileName5)) + .body("data[2].label", equalTo(testFileName3)) + .body("data[3].label", equalTo(testFileName2)) + .body("data[4].label", equalTo(testFileName1)); + + // Test Oldest order criteria + Response getVersionFilesResponseOldestCriteria = UtilIT.getVersionFiles(datasetId, testDatasetVersion, null, null, DatasetVersionServiceBean.FileMetadatasOrderCriteria.Oldest.toString(), apiToken); + + getVersionFilesResponseOldestCriteria.then().assertThat() + .statusCode(OK.getStatusCode()) + .body("data[0].label", equalTo(testFileName1)) + .body("data[1].label", equalTo(testFileName2)) + .body("data[2].label", equalTo(testFileName3)) + .body("data[3].label", equalTo(testFileName5)) + .body("data[4].label", equalTo(testFileName4)); + + // Test Size order criteria + Response getVersionFilesResponseSizeCriteria = UtilIT.getVersionFiles(datasetId, testDatasetVersion, null, null, DatasetVersionServiceBean.FileMetadatasOrderCriteria.Size.toString(), apiToken); + + getVersionFilesResponseSizeCriteria.then().assertThat() + .statusCode(OK.getStatusCode()) + .body("data[0].label", equalTo(testFileName1)) + .body("data[1].label", equalTo(testFileName3)) + .body("data[2].label", equalTo(testFileName2)) + .body("data[3].label", equalTo(testFileName5)) + .body("data[4].label", equalTo(testFileName4)); + + // Test Type order criteria + Response getVersionFilesResponseTypeCriteria = UtilIT.getVersionFiles(datasetId, testDatasetVersion, null, null, DatasetVersionServiceBean.FileMetadatasOrderCriteria.Type.toString(), apiToken); + + getVersionFilesResponseTypeCriteria.then().assertThat() + .statusCode(OK.getStatusCode()) + .body("data[0].label", equalTo(testFileName5)) + .body("data[1].label", equalTo(testFileName1)) + .body("data[2].label", equalTo(testFileName2)) + .body("data[3].label", equalTo(testFileName3)) + .body("data[4].label", equalTo(testFileName4)); } } diff --git a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java index e0f648e48aa..37e287ab19c 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java @@ -3243,12 +3243,20 @@ static Response getDatasetVersionCitation(Integer datasetId, String version, Str return response; } - static Response getVersionFiles(Integer datasetId, String version, int limit, int offset, String orderCriteria, String apiToken) { - Response response = given() + static Response 
getVersionFiles(Integer datasetId, String version, Integer limit, Integer offset, String orderCriteria, String apiToken) { + RequestSpecification requestSpecification = given() .header(API_TOKEN_HTTP_HEADER, apiToken) - .contentType("application/json") - .get("/api/datasets/" + datasetId + "/versions/" + version + "/files?limit=" + limit + "&offset=" + offset + "&orderCriteria=" + orderCriteria); - return response; + .contentType("application/json"); + if (limit != null) { + requestSpecification = requestSpecification.queryParam("limit", limit); + } + if (offset != null) { + requestSpecification = requestSpecification.queryParam("offset", offset); + } + if (orderCriteria != null) { + requestSpecification = requestSpecification.queryParam("orderCriteria", orderCriteria); + } + return requestSpecification.get("/api/datasets/" + datasetId + "/versions/" + version + "/files"); } static Response createAndUploadTestFile(String persistentId, String testFileName, byte[] testFileContentInBytes, String apiToken) throws IOException { From e8951a48b5e252a00f5f40a6bda14a8642d89dbe Mon Sep 17 00:00:00 2001 From: GPortas Date: Wed, 5 Jul 2023 09:58:38 +0100 Subject: [PATCH 0452/1525] Removed: unused imports --- src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java | 2 -- 1 file changed, 2 deletions(-) diff --git a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java index 37e287ab19c..591ab1c4222 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java @@ -50,8 +50,6 @@ import edu.harvard.iq.dataverse.util.StringUtil; import java.util.Collections; -import java.util.zip.ZipEntry; -import java.util.zip.ZipOutputStream; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNotNull; From e400f97a5f813a35d93ea04177f5b5b36ebce753 Mon Sep 17 00:00:00 2001 From: qqmyers Date: Wed, 5 Jul 2023 10:12:52 -0400 Subject: [PATCH 0453/1525] merge issue --- src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java b/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java index bdab2818fbc..70199c64fa4 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java @@ -282,7 +282,7 @@ public Response createDataset(@Context ContainerRequestContext crc, String jsonB ds.setGlobalIdCreateTime(null); Dataset managedDs = null; try { - managedDs = execCommand(new CreateNewDatasetCommand(ds, createDataverseRequest(u), false, null, validate)); + managedDs = execCommand(new CreateNewDatasetCommand(ds, createDataverseRequest(u), null, validate)); } catch (WrappedResponse ww) { Throwable cause = ww.getCause(); StringBuilder sb = new StringBuilder(); From b41364de2142ca7ac4d9ba8d362053cd8ce530b5 Mon Sep 17 00:00:00 2001 From: qqmyers Date: Wed, 5 Jul 2023 10:43:39 -0400 Subject: [PATCH 0454/1525] remove/change references to alreadyExists in text/comments --- .../AbstractGlobalIdServiceBean.java | 2 +- .../iq/dataverse/DOIDataCiteServiceBean.java | 10 +++--- .../iq/dataverse/DOIEZIdServiceBean.java | 4 +-- .../iq/dataverse/DataFileServiceBean.java | 32 ------------------- .../command/impl/ImportDatasetCommand.java | 5 --- 5 files changed, 8 insertions(+), 45 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/AbstractGlobalIdServiceBean.java 
b/src/main/java/edu/harvard/iq/dataverse/AbstractGlobalIdServiceBean.java index c8af1107828..2a3f2d50364 100644 --- a/src/main/java/edu/harvard/iq/dataverse/AbstractGlobalIdServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/AbstractGlobalIdServiceBean.java @@ -115,7 +115,7 @@ public Map getMetadataForTargetURL(DvObject dvObject) { @Override public boolean alreadyRegistered(DvObject dvo) throws Exception { if(dvo==null) { - logger.severe("Null DvObject sent to alreadyExists()."); + logger.severe("Null DvObject sent to alreadyRegistered()."); return false; } GlobalId globalId = dvo.getGlobalId(); diff --git a/src/main/java/edu/harvard/iq/dataverse/DOIDataCiteServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DOIDataCiteServiceBean.java index c25050493fb..fa0a745d80f 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DOIDataCiteServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/DOIDataCiteServiceBean.java @@ -45,20 +45,20 @@ public boolean registerWhenPublished() { @Override public boolean alreadyRegistered(GlobalId pid, boolean noProviderDefault) { - logger.log(Level.FINE,"alreadyExists"); + logger.log(Level.FINE,"alreadyRegistered"); if(pid==null || pid.asString().isEmpty()) { logger.fine("No identifier sent."); return false; } - boolean alreadyExists; + boolean alreadyRegistered; String identifier = pid.asString(); try{ - alreadyExists = doiDataCiteRegisterService.testDOIExists(identifier); + alreadyRegistered = doiDataCiteRegisterService.testDOIExists(identifier); } catch (Exception e){ - logger.log(Level.WARNING, "alreadyExists failed"); + logger.log(Level.WARNING, "alreadyRegistered failed"); return false; } - return alreadyExists; + return alreadyRegistered; } @Override diff --git a/src/main/java/edu/harvard/iq/dataverse/DOIEZIdServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DOIEZIdServiceBean.java index e7ca175230d..d9b0fde15da 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DOIEZIdServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/DOIEZIdServiceBean.java @@ -64,7 +64,7 @@ public boolean registerWhenPublished() { @Override public boolean alreadyRegistered(GlobalId pid, boolean noProviderDefault) throws Exception { - logger.log(Level.FINE,"alreadyExists"); + logger.log(Level.FINE,"alreadyRegistered"); try { HashMap result = ezidService.getMetadata(pid.asString()); return result != null && !result.isEmpty(); @@ -78,7 +78,7 @@ public boolean alreadyRegistered(GlobalId pid, boolean noProviderDefault) throws if (e.getLocalizedMessage().contains("no such identifier")){ return false; } - logger.log(Level.WARNING, "alreadyExists failed"); + logger.log(Level.WARNING, "alreadyRegistered failed"); logger.log(Level.WARNING, "getIdentifier(dvObject) {0}", pid.asString()); logger.log(Level.WARNING, "String {0}", e.toString()); logger.log(Level.WARNING, "localized message {0}", e.getLocalizedMessage()); diff --git a/src/main/java/edu/harvard/iq/dataverse/DataFileServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DataFileServiceBean.java index ca69caa9802..c30bfce368a 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DataFileServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/DataFileServiceBean.java @@ -1084,38 +1084,6 @@ public List selectFilesWithMissingOriginalSizes() { * @param idServiceBean * @return {@code true} iff the global identifier is unique. 
*/ -/* public boolean isGlobalIdUnique(String userIdentifier, DataFile datafile, GlobalIdServiceBean idServiceBean) { - String testProtocol = ""; - String testAuthority = ""; - if (datafile.getAuthority() != null){ - testAuthority = datafile.getAuthority(); - } else { - testAuthority = settingsService.getValueForKey(SettingsServiceBean.Key.Authority); - } - if (datafile.getProtocol() != null){ - testProtocol = datafile.getProtocol(); - } else { - testProtocol = settingsService.getValueForKey(SettingsServiceBean.Key.Protocol); - } - - boolean u = em.createNamedQuery("DvObject.findByProtocolIdentifierAuthority") - .setParameter("protocol", testProtocol) - .setParameter("authority", testAuthority) - .setParameter("identifier",userIdentifier) - .getResultList().isEmpty(); - - try{ - if (idServiceBean.alreadyExists(new GlobalId(testProtocol, testAuthority, userIdentifier))) { - u = false; - } - } catch (Exception e){ - //we can live with failure - means identifier not found remotely - } - - - return u; - } -*/ public void finalizeFileDelete(Long dataFileId, String storageLocation) throws IOException { // Verify that the DataFile no longer exists: if (find(dataFileId) != null) { diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/ImportDatasetCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/ImportDatasetCommand.java index 506c6becd32..478272950bd 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/ImportDatasetCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/ImportDatasetCommand.java @@ -79,11 +79,6 @@ protected void additionalParameterTests(CommandContext ctxt) throws CommandExcep * that exist (and accessible in the PID provider account configured in * Dataverse) but aren't findable to be used. That could be the case if, for * example, someone was importing a draft dataset from elsewhere. - * - * Also note that just replacing the call above with the alreadyExists() call - * here would break import cases where a DOI is public but not managable with - * the currently configured PID provider credentials. If this is not a valid use - * case, the GET above could be removed. */ GlobalIdServiceBean globalIdServiceBean = GlobalIdServiceBean.getBean(ds.getProtocol(), ctxt); if (globalIdServiceBean != null) { From 5fb4a12bd23602eae9f5e8a2f8f8a5ba75ca5648 Mon Sep 17 00:00:00 2001 From: Stephen Kraffmiller Date: Wed, 5 Jul 2023 13:53:56 -0400 Subject: [PATCH 0455/1525] #9670 fix typo --- doc/release-notes/5.14-release-notes.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/release-notes/5.14-release-notes.md b/doc/release-notes/5.14-release-notes.md index 9e5b326f63e..0c7eff4c330 100644 --- a/doc/release-notes/5.14-release-notes.md +++ b/doc/release-notes/5.14-release-notes.md @@ -176,7 +176,7 @@ This is enabled via the new setting `:MDCStartDate` that specifies the cutoff da ### Changes to PID Provider JVM Settings -In prepration for a future feature to use multiple PID providers at the same time, all JVM settings for PID providers +In preparation for a future feature to use multiple PID providers at the same time, all JVM settings for PID providers have been enabled to be configured using MicroProfile Config. In the same go, they were renamed to match the name of the provider to be configured. 
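Since the renamed PID provider settings above are plain MicroProfile Config properties, they can be supplied either as JVM options or through the environment. A minimal sketch, assuming Payara's asadmin syntax and the standard MicroProfile environment-variable name mapping (non-alphanumeric characters become underscores, then uppercase); the DataCite URL shown is only a placeholder:

```shell
# As a JVM option (asadmin style):
./asadmin create-jvm-options "-Ddataverse.pid.datacite.mds-api-url=https://mds.test.datacite.org"

# Or as an environment variable, relying on the standard MicroProfile name mapping
# (placeholder endpoint; substitute the URL for your own DataCite account):
export DATAVERSE_PID_DATACITE_MDS_API_URL="https://mds.test.datacite.org"
```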
From 6264fa43a34e3946e2933716ad6e375350022186 Mon Sep 17 00:00:00 2001 From: GPortas Date: Wed, 5 Jul 2023 19:12:45 +0100 Subject: [PATCH 0456/1525] Added: publication date field to data file payload --- .../java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java | 1 + 1 file changed, 1 insertion(+) diff --git a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java index 601d1c34e17..ed06a2c360b 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java @@ -687,6 +687,7 @@ public static JsonObjectBuilder json(DataFile df, FileMetadata fileMetadata) { .add("fileMetadataId", fileMetadata.getId()) .add("tabularTags", getTabularFileTags(df)) .add("creationDate", df.getCreateDateFormattedYYYYMMDD()) + .add("publicationDate", df.getPublicationDateFormattedYYYYMMDD()) .add("dataTables", df.getDataTables().isEmpty() ? null : JsonPrinter.jsonDT(df.getDataTables())) .add("varGroups", fileMetadata.getVarGroups().isEmpty() ? JsonPrinter.jsonVarGroup(fileMetadata.getVarGroups()) From 6687e2875c8b723df64c91404fde66de64a8e5cf Mon Sep 17 00:00:00 2001 From: qqmyers Date: Wed, 5 Jul 2023 14:38:38 -0400 Subject: [PATCH 0457/1525] call correct script --- src/main/java/edu/harvard/iq/dataverse/FileDownloadHelper.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/FileDownloadHelper.java b/src/main/java/edu/harvard/iq/dataverse/FileDownloadHelper.java index 5279b10047a..b5f2ee97094 100644 --- a/src/main/java/edu/harvard/iq/dataverse/FileDownloadHelper.java +++ b/src/main/java/edu/harvard/iq/dataverse/FileDownloadHelper.java @@ -290,7 +290,7 @@ public void handleCommandLinkClick(FileMetadata fmd){ if (FileUtil.isRequestAccessPopupRequired(fmd.getDatasetVersion())){ addFileForRequestAccess(fmd.getDataFile()); - PrimeFaces.current().executeScript("PF('requestAccessPopup').show()"); + PrimeFaces.current().executeScript("PF('guestbookAndTermsPopup').show()"); } else { requestAccess(fmd.getDataFile()); } From 99f09fa188b9e3763d91c7a2398f2ae9ad22d2e0 Mon Sep 17 00:00:00 2001 From: qqmyers Date: Wed, 5 Jul 2023 14:39:11 -0400 Subject: [PATCH 0458/1525] update method call to match changes in FileDownloadServiceBean --- .../iq/dataverse/engine/command/impl/RequestAccessCommand.java | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/RequestAccessCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/RequestAccessCommand.java index db0054e375e..a8c6150b21e 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/RequestAccessCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/RequestAccessCommand.java @@ -88,9 +88,8 @@ public DataFile execute(CommandContext ctxt) throws CommandException { file.addFileAccessRequester(requester); requester.getFileAccessRequests().add(fileAccessRequest); if (sendNotification) { - //QDRADA logger.info("ctxt.fileDownload().sendRequestFileAccessNotification(this.file, requester);"); - //ctxt.fileDownload().sendRequestFileAccessNotification(this.file, requester); + ctxt.fileDownload().sendRequestFileAccessNotification(this.file.getOwner(), this.file.getId(), requester); } return ctxt.files().save(file); } From 34c7c14be882c5b14a5b4c5f9a88dd3abc52f3d4 Mon Sep 17 00:00:00 2001 From: qqmyers Date: Wed, 5 Jul 2023 16:21:32 -0400 Subject: 
[PATCH 0459/1525] handle missing restricted key, fix iteration --- .../edu/harvard/iq/dataverse/export/ddi/DdiExportUtil.java | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/export/ddi/DdiExportUtil.java b/src/main/java/edu/harvard/iq/dataverse/export/ddi/DdiExportUtil.java index c0b39dd6101..29a59cea795 100644 --- a/src/main/java/edu/harvard/iq/dataverse/export/ddi/DdiExportUtil.java +++ b/src/main/java/edu/harvard/iq/dataverse/export/ddi/DdiExportUtil.java @@ -1712,7 +1712,7 @@ public static void createDataDscr(XMLStreamWriter xmlw, JsonArray fileDetails) t // we're not writing the opening tag until we find an actual // tabular datafile. for (int i=0;i Date: Wed, 5 Jul 2023 17:07:52 -0400 Subject: [PATCH 0460/1525] only include restricted key for export provider --- .../export/InternalExportDataProvider.java | 2 +- .../iq/dataverse/util/json/JsonPrinter.java | 20 +++++++++++++------ 2 files changed, 15 insertions(+), 7 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/export/InternalExportDataProvider.java b/src/main/java/edu/harvard/iq/dataverse/export/InternalExportDataProvider.java index efa2c0c9300..7c76c4972a8 100644 --- a/src/main/java/edu/harvard/iq/dataverse/export/InternalExportDataProvider.java +++ b/src/main/java/edu/harvard/iq/dataverse/export/InternalExportDataProvider.java @@ -77,7 +77,7 @@ public JsonArray getDatasetFileDetails() { JsonArrayBuilder jab = Json.createArrayBuilder(); for (FileMetadata fileMetadata : dv.getFileMetadatas()) { DataFile dataFile = fileMetadata.getDataFile(); - jab.add(JsonPrinter.json(dataFile, fileMetadata)); + jab.add(JsonPrinter.json(dataFile, fileMetadata, true)); } return jab.build(); } diff --git a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java index 4bc1e224977..97640aa226d 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java @@ -617,7 +617,7 @@ public static JsonObjectBuilder json(FileMetadata fmd) { .add("version", fmd.getVersion()) .add("datasetVersionId", fmd.getDatasetVersion().getId()) .add("categories", getFileCategories(fmd)) - .add("dataFile", JsonPrinter.json(fmd.getDataFile(), fmd)); + .add("dataFile", JsonPrinter.json(fmd.getDataFile(), fmd, false)); } public static JsonObjectBuilder json(AuxiliaryFile auxFile) { @@ -633,10 +633,10 @@ public static JsonObjectBuilder json(AuxiliaryFile auxFile) { .add("dataFile", JsonPrinter.json(auxFile.getDataFile())); } public static JsonObjectBuilder json(DataFile df) { - return JsonPrinter.json(df, null); + return JsonPrinter.json(df, null, false); } - public static JsonObjectBuilder json(DataFile df, FileMetadata fileMetadata) { + public static JsonObjectBuilder json(DataFile df, FileMetadata fileMetadata, boolean forExportDataProvider) { // File names are no longer stored in the DataFile entity; // (they are instead in the FileMetadata (as "labels") - this way // the filename can change between versions... @@ -661,7 +661,7 @@ public static JsonObjectBuilder json(DataFile df, FileMetadata fileMetadata) { JsonObjectBuilder embargo = df.getEmbargo() != null ? 
JsonPrinter.json(df.getEmbargo()) : null; - return jsonObjectBuilder() + NullSafeJsonBuilder builder = jsonObjectBuilder() .add("id", df.getId()) .add("persistentId", pidString) .add("pidURL", pidURL) @@ -672,7 +672,6 @@ public static JsonObjectBuilder json(DataFile df, FileMetadata fileMetadata) { .add("categories", getFileCategories(fileMetadata)) .add("embargo", embargo) //.add("released", df.isReleased()) - //.add("restricted", df.isRestricted()) .add("storageIdentifier", df.getStorageIdentifier()) .add("originalFileFormat", df.getOriginalFileFormat()) .add("originalFormatLabel", df.getOriginalFormatLabel()) @@ -692,7 +691,16 @@ public static JsonObjectBuilder json(DataFile df, FileMetadata fileMetadata) { .add("md5", getMd5IfItExists(df.getChecksumType(), df.getChecksumValue())) .add("checksum", getChecksumTypeAndValue(df.getChecksumType(), df.getChecksumValue())) .add("tabularTags", getTabularFileTags(df)) - .add("creationDate", df.getCreateDateFormattedYYYYMMDD()); + .add("creationDate", df.getCreateDateFormattedYYYYMMDD()); + /* + * The restricted state was not included prior to #9175 so to avoid backward + * incompatability, it is now only added when generating json for the + * InternalExportDataProvider fileDetails. + */ + if (forExportDataProvider) { + builder.add("restricted", df.isRestricted()); + } + return builder; } //Started from https://github.com/RENCI-NRIG/dataverse/, i.e. https://github.com/RENCI-NRIG/dataverse/commit/2b5a1225b42cf1caba85e18abfeb952171c6754a From ceeb13fb95bdbe2d23c4a48a97612b33d7927353 Mon Sep 17 00:00:00 2001 From: qqmyers Date: Wed, 5 Jul 2023 17:27:01 -0400 Subject: [PATCH 0461/1525] update flyway numbering --- ...estbook-on-request.sql => V5.13.0.3__guestbook-on-request.sql} | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename src/main/resources/db/migration/{V5.13.0.2__guestbook-on-request.sql => V5.13.0.3__guestbook-on-request.sql} (100%) diff --git a/src/main/resources/db/migration/V5.13.0.2__guestbook-on-request.sql b/src/main/resources/db/migration/V5.13.0.3__guestbook-on-request.sql similarity index 100% rename from src/main/resources/db/migration/V5.13.0.2__guestbook-on-request.sql rename to src/main/resources/db/migration/V5.13.0.3__guestbook-on-request.sql From 639cff8287f3fcfc5179d2b75a888efe78f07216 Mon Sep 17 00:00:00 2001 From: GPortas Date: Thu, 6 Jul 2023 08:01:52 +0100 Subject: [PATCH 0462/1525] Added: getCountGuestbookResponsesByDataFileId API endpoint --- .../edu/harvard/iq/dataverse/api/Files.java | 15 +++++++- .../edu/harvard/iq/dataverse/api/FilesIT.java | 36 +++++++++++++++++++ .../edu/harvard/iq/dataverse/api/UtilIT.java | 6 ++++ 3 files changed, 56 insertions(+), 1 deletion(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Files.java b/src/main/java/edu/harvard/iq/dataverse/api/Files.java index f6eda085c95..467341f4077 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Files.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Files.java @@ -12,6 +12,7 @@ import edu.harvard.iq.dataverse.DataverseServiceBean; import edu.harvard.iq.dataverse.EjbDataverseEngine; import edu.harvard.iq.dataverse.FileMetadata; +import edu.harvard.iq.dataverse.GuestbookResponseServiceBean; import edu.harvard.iq.dataverse.TermsOfUseAndAccessValidator; import edu.harvard.iq.dataverse.UserNotificationServiceBean; import edu.harvard.iq.dataverse.api.auth.AuthRequired; @@ -102,7 +103,9 @@ public class Files extends AbstractApiBean { SettingsServiceBean settingsService; @Inject MakeDataCountLoggingServiceBean mdcLogService; - + 
@Inject + GuestbookResponseServiceBean guestbookResponseService; + private static final Logger logger = Logger.getLogger(Files.class.getName()); @@ -818,4 +821,14 @@ public Response getExternalToolFMParams(@Context ContainerRequestContext crc, @P public Response getFixityAlgorithm() { return ok(systemConfig.getFileFixityChecksumAlgorithm().toString()); } + + @GET + @Path("{id}/guestbookResponses/count") + public Response getCountGuestbookResponsesByDataFileId(@PathParam("id") String dataFileId) { + try { + return ok(guestbookResponseService.getCountGuestbookResponsesByDataFileId(Long.parseLong(dataFileId)).toString()); + } catch (NumberFormatException nfe) { + return badRequest("File identifier has to be numeric."); + } + } } diff --git a/src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java b/src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java index ed4d255ab74..49b41d1e0e0 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java @@ -2018,4 +2018,40 @@ public void testDeleteFile() { .body("data.files[0]", equalTo(null)) .statusCode(OK.getStatusCode()); } + + @Test + public void testGetCountGuestbookResponsesByDataFileId() { + Response createUser = UtilIT.createRandomUser(); + createUser.then().assertThat().statusCode(OK.getStatusCode()); + String apiToken = UtilIT.getApiTokenFromResponse(createUser); + + Response createDataverseResponse = UtilIT.createRandomDataverse(apiToken); + createDataverseResponse.then().assertThat().statusCode(CREATED.getStatusCode()); + String dataverseAlias = UtilIT.getAliasFromResponse(createDataverseResponse); + + Response createDatasetResponse = UtilIT.createRandomDatasetViaNativeApi(dataverseAlias, apiToken); + createDatasetResponse.then().assertThat().statusCode(CREATED.getStatusCode()); + Integer datasetId = JsonPath.from(createDatasetResponse.body().asString()).getInt("data.id"); + + // Upload test file + String pathToTestFile = "src/main/webapp/resources/images/dataverseproject.png"; + Response uploadResponse = UtilIT.uploadFileViaNative(datasetId.toString(), pathToTestFile, Json.createObjectBuilder().build(), apiToken); + uploadResponse.then().assertThat().statusCode(OK.getStatusCode()); + + // Publish collection and dataset + UtilIT.publishDataverseViaNativeApi(dataverseAlias, apiToken).then().assertThat().statusCode(OK.getStatusCode()); + UtilIT.publishDatasetViaNativeApi(datasetId, "major", apiToken).then().assertThat().statusCode(OK.getStatusCode()); + + // Download test file + Integer testFileId = JsonPath.from(uploadResponse.body().asString()).getInt("data.files[0].dataFile.id"); + + Response downloadResponse = UtilIT.downloadFile(testFileId, apiToken); + downloadResponse.then().assertThat().statusCode(OK.getStatusCode()); + + // Get count guestbook responses and assert it is 1 + Response getGuestbookResponsesByDataFileIdResponse = UtilIT.getCountGuestbookResponsesByDataFileId(testFileId, apiToken); + getGuestbookResponsesByDataFileIdResponse.then().assertThat() + .statusCode(OK.getStatusCode()) + .body("data.message", equalTo("1")); + } } diff --git a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java index 591ab1c4222..6c2543bcf82 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java @@ -3269,4 +3269,10 @@ static Response createAndUploadTestFile(String persistentId, String testFileName return uploadZipFileViaSword(persistentId, 
pathToTestFile, apiToken); } + + static Response getCountGuestbookResponsesByDataFileId(Integer dataFileId, String apiToken) { + return given() + .header(API_TOKEN_HTTP_HEADER, apiToken) + .get("/api/files/" + dataFileId + "/guestbookResponses/count"); + } } From 886a5082b2af8f9435b4bd9d0d1ac0a6e532c0a1 Mon Sep 17 00:00:00 2001 From: GPortas Date: Thu, 6 Jul 2023 12:27:40 +0100 Subject: [PATCH 0463/1525] Added: canDownloadFile method to FileDownloadServiceBean --- .../iq/dataverse/FileDownloadServiceBean.java | 46 +++- .../FileDownloadServiceBeanTest.java | 246 ++++++++++++++++++ 2 files changed, 291 insertions(+), 1 deletion(-) create mode 100644 src/test/java/edu/harvard/iq/dataverse/FileDownloadServiceBeanTest.java diff --git a/src/main/java/edu/harvard/iq/dataverse/FileDownloadServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/FileDownloadServiceBean.java index a90489be29a..5c490b18ecc 100644 --- a/src/main/java/edu/harvard/iq/dataverse/FileDownloadServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/FileDownloadServiceBean.java @@ -572,5 +572,49 @@ public String getDirectStorageLocatrion(String storageLocation) { return null; } - + + /** + * + * Checks if a user can download a file based on the file metadata and the permissions of the user + * + * This method is based on {@link edu.harvard.iq.dataverse.FileDownloadHelper#canDownloadFile(FileMetadata), + * and has been adapted to make it callable from the API instead of a view + * + * @param user requesting the download + * @param fileMetadata of the particular file to download + * @return boolean + */ + public boolean canDownloadFile(User user, FileMetadata fileMetadata){ + if (fileMetadata == null){ + return false; + } + if ((fileMetadata.getId() == null) || (fileMetadata.getDataFile().getId() == null)){ + return false; + } + if (user == null) { + return false; + } + if (user instanceof PrivateUrlUser) { + // Always allow download for PrivateUrlUser + return true; + } + + if (fileMetadata.getDatasetVersion().isDeaccessioned()) { + return this.permissionService.userOn(user, fileMetadata.getDatasetVersion().getDataset()).has(Permission.EditDataset); + } + + // Note that `isRestricted` at the FileMetadata level is for expressing intent by version. Enforcement is done with `isRestricted` at the DataFile level. + boolean isRestrictedFile = fileMetadata.isRestricted() || fileMetadata.getDataFile().isRestricted(); + if (!isRestrictedFile && !FileUtil.isActivelyEmbargoed(fileMetadata)){ + return true; + } + + // See if the DataverseRequest, which contains IP Groups, has permission to download the file. 
+ if (permissionService.requestOn(dvRequestService.getDataverseRequest(), fileMetadata.getDataFile()).has(Permission.DownloadFile)) { + logger.fine("The DataverseRequest (User plus IP address) has access to download the file."); + return true; + } + + return false; + } } diff --git a/src/test/java/edu/harvard/iq/dataverse/FileDownloadServiceBeanTest.java b/src/test/java/edu/harvard/iq/dataverse/FileDownloadServiceBeanTest.java new file mode 100644 index 00000000000..d754dd357a9 --- /dev/null +++ b/src/test/java/edu/harvard/iq/dataverse/FileDownloadServiceBeanTest.java @@ -0,0 +1,246 @@ +package edu.harvard.iq.dataverse; + +import edu.harvard.iq.dataverse.authorization.Permission; +import edu.harvard.iq.dataverse.authorization.users.User; +import edu.harvard.iq.dataverse.makedatacount.MakeDataCountLoggingServiceBean; +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.CsvSource; +import org.mockito.ArgumentMatchers; +import org.mockito.Mock; +import org.mockito.junit.jupiter.MockitoExtension; + +import java.time.LocalDate; + +import static edu.harvard.iq.dataverse.mocks.MocksFactory.makeAuthenticatedUser; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + + +@ExtendWith(MockitoExtension.class) +public class FileDownloadServiceBeanTest { + + @Mock + private PermissionServiceBean permissionServiceBeanStub; + @Mock + private DataverseRequestServiceBean dataverseRequestServiceBeanMock; + @Mock + private MakeDataCountLoggingServiceBean makeDataCountLoggingServiceBeanMock; + + private FileDownloadServiceBean sut; + + private User testUser; + + @BeforeEach + public void setUp() { + sut = new FileDownloadServiceBean(); + sut.permissionService = permissionServiceBeanStub; + sut.dvRequestService = dataverseRequestServiceBeanMock; + sut.mdcLogService = makeDataCountLoggingServiceBeanMock; + testUser = makeAuthenticatedUser("Test", "Test"); + } + + @Test + public void testCanDownloadFile_withoutUser() { + assertFalse(sut.canDownloadFile(null, new FileMetadata())); + } + + @Test + public void testCanDownloadFile_withoutFileMetadata() { + assertFalse(sut.canDownloadFile(testUser, null)); + } + + @Test + void testCanDownloadFile_withNullMetadataId() { + FileMetadata testFileMetadata = new FileMetadata(); + testFileMetadata.setId(null); + + assertFalse(sut.canDownloadFile(testUser, testFileMetadata)); + } + + @Test + void testCanDownloadFile_withNullDataFileId() { + FileMetadata testFileMetadata = new FileMetadata(); + testFileMetadata.setId(1L); + DataFile testDataFile = new DataFile(); + testDataFile.setId(null); + testFileMetadata.setDataFile(testDataFile); + + assertFalse(sut.canDownloadFile(testUser, testFileMetadata)); + } + + @ParameterizedTest + @CsvSource({"false", "true"}) + void testCanDownloadFile_forDeaccessionedFile(boolean hasPermission) { + DataFile testDataFile = new DataFile(); + testDataFile.setId(2L); + + DatasetVersion testDatasetVersion = new DatasetVersion(); + testDatasetVersion.setDataset(new Dataset()); + testDatasetVersion.setVersionState(DatasetVersion.VersionState.DEACCESSIONED); + + FileMetadata testFileMetadata = new FileMetadata(); + testFileMetadata.setId(1L); + testFileMetadata.setDataFile(testDataFile); + 
testFileMetadata.setDatasetVersion(testDatasetVersion); + + mockPermissionResponseUserOn(hasPermission); + + Assertions.assertEquals(hasPermission, sut.canDownloadFile(testUser, testFileMetadata)); + } + + @Test + void testCanDownloadFile_forUnrestrictedReleasedFile() { + DataFile testDataFile = new DataFile(); + testDataFile.setId(2L); + + DatasetVersion testDatasetVersion = new DatasetVersion(); + testDatasetVersion.setVersionState(DatasetVersion.VersionState.RELEASED); + + FileMetadata testFileMetadata = new FileMetadata(); + testFileMetadata.setId(1L); + testFileMetadata.setRestricted(false); + testFileMetadata.setDataFile(testDataFile); + testFileMetadata.setDatasetVersion(testDatasetVersion); + + assertTrue(sut.canDownloadFile(testUser, testFileMetadata)); + } + + @Test + void testCanDownloadFile_forUnrestrictedReleasedActiveEmbargoFile() { + DataFile testDataFile = new DataFile(); + testDataFile.setId(2L); + + // With an embargo, an unrestricted file should only be accessible if the embargo has ended + + Embargo testEmbargo = new Embargo(LocalDate.now().plusDays(3), "Still embargoed"); + testDataFile.setEmbargo(testEmbargo); + + DatasetVersion testDatasetVersion = new DatasetVersion(); + testDatasetVersion.setVersionState(DatasetVersion.VersionState.RELEASED); + + FileMetadata testFileMetadata = new FileMetadata(); + testFileMetadata.setId(1L); + testFileMetadata.setRestricted(false); + testFileMetadata.setDataFile(testDataFile); + testFileMetadata.setDatasetVersion(testDatasetVersion); + mockPermissionResponseRequestOn(false); + + assertFalse(sut.canDownloadFile(testUser, testFileMetadata)); + } + + @Test + void testCanDownloadFile_forUnrestrictedReleasedExpiredEmbargoFile() { + DataFile testDataFile = new DataFile(); + testDataFile.setId(2L); + + // With an embargo, an unrestricted file should only be accessible if the embargo has ended + + Embargo testEmbargo = new Embargo(LocalDate.now().minusDays(3), "Was embargoed"); + testDataFile.setEmbargo(testEmbargo); + + DatasetVersion testDatasetVersion = new DatasetVersion(); + testDatasetVersion.setVersionState(DatasetVersion.VersionState.RELEASED); + + FileMetadata testFileMetadata = new FileMetadata(); + testFileMetadata.setId(1L); + testFileMetadata.setRestricted(false); + testFileMetadata.setDataFile(testDataFile); + testFileMetadata.setDatasetVersion(testDatasetVersion); + + assertTrue(sut.canDownloadFile(testUser, testFileMetadata)); + } + + @ParameterizedTest + @CsvSource({"false", "true"}) + void testCanDownloadFile_forRestrictedReleasedFile(boolean hasPermission) { + DataFile testDataFile = new DataFile(); + testDataFile.setId(2L); + + DatasetVersion testDatasetVersion = new DatasetVersion(); + testDatasetVersion.setVersionState(DatasetVersion.VersionState.RELEASED); + + FileMetadata testFileMetadata = new FileMetadata(); + testFileMetadata.setId(1L); + testFileMetadata.setRestricted(true); + testFileMetadata.setDataFile(testDataFile); + testFileMetadata.setDatasetVersion(testDatasetVersion); + + mockPermissionResponseRequestOn(hasPermission); + + Assertions.assertEquals(hasPermission, sut.canDownloadFile(testUser, testFileMetadata)); + } + + @ParameterizedTest + @CsvSource({"false", "true"}) + void testCanDownloadFile_forRestrictedReleasedFileWithActiveEmbargo(boolean hasPermission) { + DataFile testDataFile = new DataFile(); + testDataFile.setId(2L); + + // With an active embargo, a restricted file should have the same access regardless of + // embargo state (with an active embargo, there's no way to request permissions, + 
// so the hasPermission=true case primarily applies to the original dataset + // creators) + + Embargo testEmbargo = new Embargo(LocalDate.now().plusDays(3), "Still embargoed"); + testDataFile.setEmbargo(testEmbargo); + DatasetVersion testDatasetVersion = new DatasetVersion(); + testDatasetVersion.setVersionState(DatasetVersion.VersionState.RELEASED); + + FileMetadata testFileMetadata = new FileMetadata(); + testFileMetadata.setId(1L); + testFileMetadata.setRestricted(true); + testFileMetadata.setDataFile(testDataFile); + testFileMetadata.setDatasetVersion(testDatasetVersion); + + mockPermissionResponseRequestOn(hasPermission); + + Assertions.assertEquals(hasPermission, sut.canDownloadFile(testUser, testFileMetadata)); + } + + @ParameterizedTest + @CsvSource({"false", "true"}) + void testCanDownloadFile_forRestrictedReleasedFileWithExpiredEmbargo(boolean hasPermission) { + DataFile testDataFile = new DataFile(); + testDataFile.setId(2L); + + // With an embargo, a restricted file should have the same access regardless of + // embargo state (with an active embargo, there's no way to request permissions, + // so the hasPermission=true case primarily applies to the original dataset + // creators) + + Embargo testEmbargo = new Embargo(LocalDate.now().minusDays(3), "No longer embargoed"); + testDataFile.setEmbargo(testEmbargo); + DatasetVersion testDatasetVersion = new DatasetVersion(); + testDatasetVersion.setVersionState(DatasetVersion.VersionState.RELEASED); + + FileMetadata testFileMetadata = new FileMetadata(); + testFileMetadata.setId(1L); + testFileMetadata.setRestricted(true); + testFileMetadata.setDataFile(testDataFile); + testFileMetadata.setDatasetVersion(testDatasetVersion); + + mockPermissionResponseRequestOn(hasPermission); + + Assertions.assertEquals(hasPermission, sut.canDownloadFile(testUser, testFileMetadata)); + } + + private void mockPermissionResponseUserOn(boolean response) { + PermissionServiceBean.StaticPermissionQuery staticPermissionQueryMock = mock(PermissionServiceBean.StaticPermissionQuery.class); + + when(permissionServiceBeanStub.userOn(ArgumentMatchers.any(), ArgumentMatchers.any(Dataset.class))).thenReturn(staticPermissionQueryMock); + when(staticPermissionQueryMock.has(Permission.EditDataset)).thenReturn(response); + } + + private void mockPermissionResponseRequestOn(boolean response) { + PermissionServiceBean.RequestPermissionQuery requestPermissionQueryMock = mock(PermissionServiceBean.RequestPermissionQuery.class); + + when(permissionServiceBeanStub.requestOn(ArgumentMatchers.any(), ArgumentMatchers.any(DataFile.class))).thenReturn(requestPermissionQueryMock); + when(requestPermissionQueryMock.has(Permission.DownloadFile)).thenReturn(response); + } +} From fbf67ff496928399a447e4c49ef207b5bb0d5eae Mon Sep 17 00:00:00 2001 From: qqmyers Date: Thu, 6 Jul 2023 10:27:02 -0400 Subject: [PATCH 0464/1525] don't assume dataTables for all tabular files --- .../iq/dataverse/export/ddi/DdiExportUtil.java | 18 ++++++++++-------- 1 file changed, 10 insertions(+), 8 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/export/ddi/DdiExportUtil.java b/src/main/java/edu/harvard/iq/dataverse/export/ddi/DdiExportUtil.java index 29a59cea795..020800a7d10 100644 --- a/src/main/java/edu/harvard/iq/dataverse/export/ddi/DdiExportUtil.java +++ b/src/main/java/edu/harvard/iq/dataverse/export/ddi/DdiExportUtil.java @@ -1733,8 +1733,8 @@ public static void createDataDscr(XMLStreamWriter xmlw, JsonArray fileDetails) t } } - - if (fileJson.containsKey("dataTables")) { + // 
originalFileFormat is one of several keys that only exist for tabular data + if (fileJson.containsKey("originalFileFormat")) { if (!tabularData) { xmlw.writeStartElement("dataDscr"); tabularData = true; @@ -1745,12 +1745,14 @@ public static void createDataDscr(XMLStreamWriter xmlw, JsonArray fileDetails) t createVarGroupDDI(xmlw, varGroups.getJsonObject(j)); } } - JsonObject dataTable = fileJson.getJsonArray("dataTables").getJsonObject(0); - JsonArray vars = dataTable.getJsonArray("dataVariables"); - if (vars != null) { - for (int j = 0; j < vars.size(); j++) { - createVarDDI(xmlw, vars.getJsonObject(j), fileJson.getJsonNumber("id").toString(), - fileJson.getJsonNumber("fileMetadataId").toString()); + if (fileJson.containsKey("dataTables")) { + JsonObject dataTable = fileJson.getJsonArray("dataTables").getJsonObject(0); + JsonArray vars = dataTable.getJsonArray("dataVariables"); + if (vars != null) { + for (int j = 0; j < vars.size(); j++) { + createVarDDI(xmlw, vars.getJsonObject(j), fileJson.getJsonNumber("id").toString(), + fileJson.getJsonNumber("fileMetadataId").toString()); + } } } } From e0428c2e9d75d7e026d7b7937925ca42fe434bce Mon Sep 17 00:00:00 2001 From: qqmyers Date: Thu, 6 Jul 2023 10:38:43 -0400 Subject: [PATCH 0465/1525] change dataTable check in fileDscr not dataDscr --- .../dataverse/export/ddi/DdiExportUtil.java | 29 ++++++++++--------- 1 file changed, 15 insertions(+), 14 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/export/ddi/DdiExportUtil.java b/src/main/java/edu/harvard/iq/dataverse/export/ddi/DdiExportUtil.java index 020800a7d10..12b3c0d0c1e 100644 --- a/src/main/java/edu/harvard/iq/dataverse/export/ddi/DdiExportUtil.java +++ b/src/main/java/edu/harvard/iq/dataverse/export/ddi/DdiExportUtil.java @@ -1733,8 +1733,7 @@ public static void createDataDscr(XMLStreamWriter xmlw, JsonArray fileDetails) t } } - // originalFileFormat is one of several keys that only exist for tabular data - if (fileJson.containsKey("originalFileFormat")) { + if (fileJson.containsKey("dataTables")) { if (!tabularData) { xmlw.writeStartElement("dataDscr"); tabularData = true; @@ -1745,14 +1744,12 @@ public static void createDataDscr(XMLStreamWriter xmlw, JsonArray fileDetails) t createVarGroupDDI(xmlw, varGroups.getJsonObject(j)); } } - if (fileJson.containsKey("dataTables")) { - JsonObject dataTable = fileJson.getJsonArray("dataTables").getJsonObject(0); - JsonArray vars = dataTable.getJsonArray("dataVariables"); - if (vars != null) { - for (int j = 0; j < vars.size(); j++) { - createVarDDI(xmlw, vars.getJsonObject(j), fileJson.getJsonNumber("id").toString(), - fileJson.getJsonNumber("fileMetadataId").toString()); - } + JsonObject dataTable = fileJson.getJsonArray("dataTables").getJsonObject(0); + JsonArray vars = dataTable.getJsonArray("dataVariables"); + if (vars != null) { + for (int j = 0; j < vars.size(); j++) { + createVarDDI(xmlw, vars.getJsonObject(j), fileJson.getJsonNumber("id").toString(), + fileJson.getJsonNumber("fileMetadataId").toString()); } } } @@ -2027,9 +2024,12 @@ private static void createFileDscr(XMLStreamWriter xmlw, JsonArray fileDetails) String dataverseUrl = SystemConfig.getDataverseSiteUrlStatic(); for (int i =0;i Date: Thu, 6 Jul 2023 10:49:27 -0400 Subject: [PATCH 0466/1525] catch additional use of dt when null --- .../java/edu/harvard/iq/dataverse/export/ddi/DdiExportUtil.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/export/ddi/DdiExportUtil.java 
b/src/main/java/edu/harvard/iq/dataverse/export/ddi/DdiExportUtil.java index 12b3c0d0c1e..73ba2d204ce 100644 --- a/src/main/java/edu/harvard/iq/dataverse/export/ddi/DdiExportUtil.java +++ b/src/main/java/edu/harvard/iq/dataverse/export/ddi/DdiExportUtil.java @@ -2074,7 +2074,7 @@ private static void createFileDscr(XMLStreamWriter xmlw, JsonArray fileDetails) // various notes: // this specially formatted note section is used to store the UNF // (Universal Numeric Fingerprint) signature: - if (dt.containsKey("UNF") && !dt.getString("UNF").isBlank()) { + if ((dt!=null) && (dt.containsKey("UNF") && !dt.getString("UNF").isBlank())) { xmlw.writeStartElement("notes"); writeAttribute(xmlw, "level", LEVEL_FILE); writeAttribute(xmlw, "type", NOTE_TYPE_UNF); From d5a19f218d82baa377d4dcd26dcb29c4626a9d88 Mon Sep 17 00:00:00 2001 From: qqmyers Date: Thu, 6 Jul 2023 16:15:39 -0400 Subject: [PATCH 0467/1525] add guestbookPopupAction from kebab for request access case --- src/main/webapp/file-download-button-fragment.xhtml | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/src/main/webapp/file-download-button-fragment.xhtml b/src/main/webapp/file-download-button-fragment.xhtml index 2a8152d5395..a0b79671ae2 100644 --- a/src/main/webapp/file-download-button-fragment.xhtml +++ b/src/main/webapp/file-download-button-fragment.xhtml @@ -28,9 +28,10 @@ rendered="#{fileDownloadHelper.session.user.authenticated}"> + #{fileMetadata.dataFile.containsFileAccessRequestFromUser(dataverseSession.user) ? bundle['file.accessRequested'] : bundle['file.requestAccess']} From 91fafd7a9b84c8979bb6859d1a29108ab74b8305 Mon Sep 17 00:00:00 2001 From: qqmyers Date: Thu, 6 Jul 2023 16:16:03 -0400 Subject: [PATCH 0468/1525] add todo re: guestbook/no terms case --- src/main/java/edu/harvard/iq/dataverse/util/FileUtil.java | 1 + 1 file changed, 1 insertion(+) diff --git a/src/main/java/edu/harvard/iq/dataverse/util/FileUtil.java b/src/main/java/edu/harvard/iq/dataverse/util/FileUtil.java index a5fb98f7c49..d1e36370d32 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/FileUtil.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/FileUtil.java @@ -1599,6 +1599,7 @@ public static boolean isRequestAccessPopupRequired(DatasetVersion datasetVersion if (answer != null) { return answer; } + //ToDo - also check for guestbook when guestbook at request is true? logger.fine("Request access popup is not required."); return false; } From 1793b33534a9430ecb7dfd6197738b4a9a9a77aa Mon Sep 17 00:00:00 2001 From: sirineREKIK Date: Fri, 7 Jul 2023 09:18:22 +0200 Subject: [PATCH 0469/1525] add key in header api --- doc/sphinx-guides/source/api/native-api.rst | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/doc/sphinx-guides/source/api/native-api.rst b/doc/sphinx-guides/source/api/native-api.rst index 6b1bc72fea1..11fb7c49c1c 100644 --- a/doc/sphinx-guides/source/api/native-api.rst +++ b/doc/sphinx-guides/source/api/native-api.rst @@ -4549,12 +4549,10 @@ A curl example listing objects export PUBLISHED_STATES=Unpublished export PER_PAGE=10 - curl "$SERVER_URL/api/mydata/retrieve?key=$API_TOKEN&role_ids=$ROLE_IDS&dvobject_types=$DVOBJECT_TYPES&published_states=$PUBLISHED_STATES&per_page=$PER_PAGE" + curl -H "X-Dataverse-key:$API_TOKEN" "$SERVER_URL/api/mydata/retrieve?role_ids=$ROLE_IDS&dvobject_types=$DVOBJECT_TYPES&published_states=$PUBLISHED_STATES&per_page=$PER_PAGE" Parameters: -``key`` Is the user token, for this API is must not be passed in the header. - ``role_id`` Roles are customizable. 
Standard roles include: - ``1`` = Admin From 6ead834fe9ca1b7ad1ac1c5d83ff6e156df08255 Mon Sep 17 00:00:00 2001 From: GPortas Date: Fri, 7 Jul 2023 08:55:54 +0100 Subject: [PATCH 0470/1525] Added: canDataFileBeDownloaded API endpoint --- .../edu/harvard/iq/dataverse/api/Files.java | 15 +++++++-- .../edu/harvard/iq/dataverse/api/FilesIT.java | 32 +++++++++++++++++++ .../edu/harvard/iq/dataverse/api/UtilIT.java | 6 ++++ 3 files changed, 51 insertions(+), 2 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Files.java b/src/main/java/edu/harvard/iq/dataverse/api/Files.java index 467341f4077..341fb94e086 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Files.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Files.java @@ -11,6 +11,7 @@ import edu.harvard.iq.dataverse.DataverseRequestServiceBean; import edu.harvard.iq.dataverse.DataverseServiceBean; import edu.harvard.iq.dataverse.EjbDataverseEngine; +import edu.harvard.iq.dataverse.FileDownloadServiceBean; import edu.harvard.iq.dataverse.FileMetadata; import edu.harvard.iq.dataverse.GuestbookResponseServiceBean; import edu.harvard.iq.dataverse.TermsOfUseAndAccessValidator; @@ -106,6 +107,9 @@ public class Files extends AbstractApiBean { @Inject GuestbookResponseServiceBean guestbookResponseService; + @Inject + FileDownloadServiceBean fileDownloadServiceBean; + private static final Logger logger = Logger.getLogger(Files.class.getName()); @@ -824,11 +828,18 @@ public Response getFixityAlgorithm() { @GET @Path("{id}/guestbookResponses/count") - public Response getCountGuestbookResponsesByDataFileId(@PathParam("id") String dataFileId) { + public Response getCountGuestbookResponsesByDataFileId(@PathParam("id") long dataFileId) { try { - return ok(guestbookResponseService.getCountGuestbookResponsesByDataFileId(Long.parseLong(dataFileId)).toString()); + return ok(guestbookResponseService.getCountGuestbookResponsesByDataFileId(dataFileId).toString()); } catch (NumberFormatException nfe) { return badRequest("File identifier has to be numeric."); } } + + @GET + @AuthRequired + @Path("{id}/canBeDownloaded") + public Response canDataFileBeDownloaded(@Context ContainerRequestContext crc, @PathParam("id") long dataFileId) { + return ok(fileDownloadServiceBean.canDownloadFile(getRequestUser(crc), fileSvc.find(dataFileId).getFileMetadata())); + } } diff --git a/src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java b/src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java index 49b41d1e0e0..033ebe25062 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java @@ -2054,4 +2054,36 @@ public void testGetCountGuestbookResponsesByDataFileId() { .statusCode(OK.getStatusCode()) .body("data.message", equalTo("1")); } + + @Test + public void testCanDataFileBeDownloaded() { + Response createUser = UtilIT.createRandomUser(); + createUser.then().assertThat().statusCode(OK.getStatusCode()); + String apiToken = UtilIT.getApiTokenFromResponse(createUser); + + Response createDataverseResponse = UtilIT.createRandomDataverse(apiToken); + createDataverseResponse.then().assertThat().statusCode(CREATED.getStatusCode()); + String dataverseAlias = UtilIT.getAliasFromResponse(createDataverseResponse); + + Response createDatasetResponse = UtilIT.createRandomDatasetViaNativeApi(dataverseAlias, apiToken); + createDatasetResponse.then().assertThat().statusCode(CREATED.getStatusCode()); + Integer datasetId = JsonPath.from(createDatasetResponse.body().asString()).getInt("data.id"); + + // 
Upload test file + String pathToTestFile = "src/main/webapp/resources/images/dataverseproject.png"; + Response uploadResponse = UtilIT.uploadFileViaNative(datasetId.toString(), pathToTestFile, Json.createObjectBuilder().build(), apiToken); + uploadResponse.then().assertThat().statusCode(OK.getStatusCode()); + + // Publish collection and dataset + UtilIT.publishDataverseViaNativeApi(dataverseAlias, apiToken).then().assertThat().statusCode(OK.getStatusCode()); + UtilIT.publishDatasetViaNativeApi(datasetId, "major", apiToken).then().assertThat().statusCode(OK.getStatusCode()); + + // Assert user can download test file + Integer testFileId = JsonPath.from(uploadResponse.body().asString()).getInt("data.files[0].dataFile.id"); + Response canDataFileBeDownloadedResponse = UtilIT.canDataFileBeDownloaded(testFileId, apiToken); + + canDataFileBeDownloadedResponse.then().assertThat().statusCode(OK.getStatusCode()); + boolean canDownloadTestFile = JsonPath.from(canDataFileBeDownloadedResponse.body().asString()).getBoolean("data"); + assertTrue(canDownloadTestFile); + } } diff --git a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java index 6c2543bcf82..4745cc7d2eb 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java @@ -3275,4 +3275,10 @@ static Response getCountGuestbookResponsesByDataFileId(Integer dataFileId, Strin .header(API_TOKEN_HTTP_HEADER, apiToken) .get("/api/files/" + dataFileId + "/guestbookResponses/count"); } + + static Response canDataFileBeDownloaded(Integer dataFileId, String apiToken) { + return given() + .header(API_TOKEN_HTTP_HEADER, apiToken) + .get("/api/files/" + dataFileId + "/canBeDownloaded"); + } } From fae00a7809db31f8946c65d3c240357bf9273309 Mon Sep 17 00:00:00 2001 From: qqmyers Date: Fri, 7 Jul 2023 10:48:11 -0400 Subject: [PATCH 0471/1525] Handle kebab menu, add resize to popup show request --- .../harvard/iq/dataverse/FileDownloadHelper.java | 13 +++++++++---- 1 file changed, 9 insertions(+), 4 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/FileDownloadHelper.java b/src/main/java/edu/harvard/iq/dataverse/FileDownloadHelper.java index b5f2ee97094..ce2539758ab 100644 --- a/src/main/java/edu/harvard/iq/dataverse/FileDownloadHelper.java +++ b/src/main/java/edu/harvard/iq/dataverse/FileDownloadHelper.java @@ -143,10 +143,15 @@ public void writeGuestbookAndLaunchPackagePopup(GuestbookResponse guestbookRespo fileDownloadService.writeGuestbookResponseRecord(guestbookResponse); } + public void writeGuestbookResponseAndRequestAccess(GuestbookResponse guestbookResponse) { - //requestContext.execute("PF('guestbookAndTermsPopup').hide()"); - PrimeFaces.current().executeScript("PF('guestbookAndTermsPopup').hide()"); - fileDownloadService.writeGuestbookResponseAndRequestAccess(guestbookResponse); + + if(!filesForRequestAccess.isEmpty()) { + /* Only for single file requests (i.e. 
from kebab menu) */ + guestbookResponse.setDataFile(filesForRequestAccess.get(0)); + } + PrimeFaces.current().executeScript("PF('guestbookAndTermsPopup').hide()"); + fileDownloadService.writeGuestbookResponseAndRequestAccess(guestbookResponse); } /** @@ -290,7 +295,7 @@ public void handleCommandLinkClick(FileMetadata fmd){ if (FileUtil.isRequestAccessPopupRequired(fmd.getDatasetVersion())){ addFileForRequestAccess(fmd.getDataFile()); - PrimeFaces.current().executeScript("PF('guestbookAndTermsPopup').show()"); + PrimeFaces.current().executeScript("PF('guestbookAndTermsPopup').show();handleResizeDialog('guestbookAndTermsPopup');"); } else { requestAccess(fmd.getDataFile()); } From 892d796a279df87dfb1b6608d5d02aff206f8b0a Mon Sep 17 00:00:00 2001 From: qqmyers Date: Fri, 7 Jul 2023 12:20:56 -0400 Subject: [PATCH 0472/1525] set state for grant and reject --- .../edu/harvard/iq/dataverse/DataFile.java | 17 +++++++++++------ .../dataverse/ManageFilePermissionsPage.java | 19 ++++++++----------- .../edu/harvard/iq/dataverse/api/Access.java | 15 +++++++++------ 3 files changed, 28 insertions(+), 23 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/DataFile.java b/src/main/java/edu/harvard/iq/dataverse/DataFile.java index 156703d3d0b..ff8c544ddfb 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DataFile.java +++ b/src/main/java/edu/harvard/iq/dataverse/DataFile.java @@ -794,15 +794,20 @@ public void addFileAccessRequester(AuthenticatedUser authenticatedUser) { this.fileAccessRequests.add(request); } - public boolean removeFileAccessRequester(RoleAssignee roleAssignee) { + public FileAccessRequest getAccessRequestForAssignee(RoleAssignee roleAssignee) { if (this.fileAccessRequests == null) { - return false; + return null; } - FileAccessRequest request = this.fileAccessRequests.stream() - .filter(fileAccessRequest -> fileAccessRequest.getRequester().equals(roleAssignee)) - .findFirst() - .orElse(null); + return this.fileAccessRequests.stream() + .filter(fileAccessRequest -> fileAccessRequest.getRequester().equals(roleAssignee)).findFirst() + .orElse(null); + } + + public boolean removeFileAccessRequest(FileAccessRequest request) { + if (this.fileAccessRequests == null) { + return false; + } if (request != null) { this.fileAccessRequests.remove(request); diff --git a/src/main/java/edu/harvard/iq/dataverse/ManageFilePermissionsPage.java b/src/main/java/edu/harvard/iq/dataverse/ManageFilePermissionsPage.java index 6bf548e65d3..bc9d79eac87 100644 --- a/src/main/java/edu/harvard/iq/dataverse/ManageFilePermissionsPage.java +++ b/src/main/java/edu/harvard/iq/dataverse/ManageFilePermissionsPage.java @@ -458,14 +458,9 @@ private void grantAccessToRequests(AuthenticatedUser au, List files) { DataverseRole fileDownloaderRole = roleService.findBuiltinRoleByAlias(DataverseRole.FILE_DOWNLOADER); for (DataFile file : files) { if (assignRole(au, file, fileDownloaderRole)) { - //TODO - why remove requests just to set them again? 
- if (file.removeFileAccessRequester(au)) { - List fileAccessRequests = fileAccessRequestService.findAll(au.getId(), file.getId(), FileAccessRequest.RequestState.CREATED); - for(FileAccessRequest far : fileAccessRequests){ - far.setStateGranted(); - fileAccessRequestService.save(far); - } - file.setFileAccessRequests(fileAccessRequests); + FileAccessRequest far = file.getAccessRequestForAssignee(au); + if (far!=null) { + far.setStateGranted(); datafileService.save(file); } actionPerformed = true; @@ -497,9 +492,11 @@ public void rejectAccessToAllRequests(AuthenticatedUser au) { private void rejectAccessToRequests(AuthenticatedUser au, List files) { boolean actionPerformed = false; for (DataFile file : files) { - if(file.removeFileAccessRequester(au)) { - - // TODO - set FileAccessRequest.RequestState to REJECTED + FileAccessRequest far = file.getAccessRequestForAssignee(au); + if(far!=null) { + far.setStateRejected(); + fileAccessRequestService.save(far); + file.removeFileAccessRequest(far); datafileService.save(file); actionPerformed = true; } diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Access.java b/src/main/java/edu/harvard/iq/dataverse/api/Access.java index 358b0159e2a..687b859c936 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Access.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Access.java @@ -1535,7 +1535,9 @@ public Response grantFileAccess(@Context ContainerRequestContext crc, @PathParam try { engineSvc.submit(new AssignRoleCommand(ra, fileDownloaderRole, dataFile, dataverseRequest, null)); - if (dataFile.removeFileAccessRequester(ra)) { + FileAccessRequest far = dataFile.getAccessRequestForAssignee(ra); + if(far!=null) { + far.setStateGranted(); dataFileService.save(dataFile); } @@ -1660,20 +1662,21 @@ public Response rejectFileAccess(@Context ContainerRequestContext crc, @PathPara if (!(dataverseRequest.getAuthenticatedUser().isSuperuser() || permissionService.requestOn(dataverseRequest, dataFile).has(Permission.ManageFilePermissions))) { return error(BAD_REQUEST, BundleUtil.getStringFromBundle("access.api.rejectAccess.failure.noPermissions")); } - - if (dataFile.removeFileAccessRequester(ra)) { + FileAccessRequest far = dataFile.getAccessRequestForAssignee(ra); + if (far != null) { + far.setStateRejected(); dataFileService.save(dataFile); try { AuthenticatedUser au = (AuthenticatedUser) ra; - userNotificationService.sendNotification(au, new Timestamp(new Date().getTime()), UserNotification.Type.REJECTFILEACCESS, dataFile.getOwner().getId()); + userNotificationService.sendNotification(au, new Timestamp(new Date().getTime()), + UserNotification.Type.REJECTFILEACCESS, dataFile.getOwner().getId()); } catch (ClassCastException e) { - //nothing to do here - can only send a notification to an authenticated user + // nothing to do here - can only send a notification to an authenticated user } List args = Arrays.asList(dataFile.getDisplayName()); return ok(BundleUtil.getStringFromBundle("access.api.rejectAccess.success.for.single.file", args)); - } else { List args = Arrays.asList(dataFile.getDisplayName(), ra.getDisplayInfo().getTitle()); return error(BAD_REQUEST, BundleUtil.getStringFromBundle("access.api.fileAccess.rejectFailure.noRequest", args)); From ef16a855f14ce2432e0518203033a6dcf587f8dd Mon Sep 17 00:00:00 2001 From: qqmyers Date: Fri, 7 Jul 2023 12:37:07 -0400 Subject: [PATCH 0473/1525] get requests by state/only get CREATED in perms page --- src/main/java/edu/harvard/iq/dataverse/DataFile.java | 4 ++++ 
.../edu/harvard/iq/dataverse/ManageFilePermissionsPage.java | 6 ++---- 2 files changed, 6 insertions(+), 4 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/DataFile.java b/src/main/java/edu/harvard/iq/dataverse/DataFile.java index ff8c544ddfb..daeaf4f55c0 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DataFile.java +++ b/src/main/java/edu/harvard/iq/dataverse/DataFile.java @@ -209,6 +209,10 @@ public String toString() { public List getFileAccessRequests(){ return fileAccessRequests; } + + public List getFileAccessRequests(FileAccessRequest.RequestState state){ + return fileAccessRequests.stream().filter(far -> far.getState() == state).collect(Collectors.toList()); + } public void setFileAccessRequests(List fARs){ this.fileAccessRequests = fARs; diff --git a/src/main/java/edu/harvard/iq/dataverse/ManageFilePermissionsPage.java b/src/main/java/edu/harvard/iq/dataverse/ManageFilePermissionsPage.java index bc9d79eac87..ddb853a3db9 100644 --- a/src/main/java/edu/harvard/iq/dataverse/ManageFilePermissionsPage.java +++ b/src/main/java/edu/harvard/iq/dataverse/ManageFilePermissionsPage.java @@ -181,16 +181,14 @@ private void initMaps() { fileMap.put(file, raList); // populate the file access requests map - for (FileAccessRequest fileAccessRequest : file.getFileAccessRequests()) { + for (FileAccessRequest fileAccessRequest : file.getFileAccessRequests(FileAccessRequest.RequestState.CREATED)) { List requestedFiles = fileAccessRequestMap.get(fileAccessRequest.getRequester()); if (requestedFiles == null) { requestedFiles = new ArrayList<>(); AuthenticatedUser withProvider = authenticationService.getAuthenticatedUserWithProvider(fileAccessRequest.getRequester().getUserIdentifier()); fileAccessRequestMap.put(withProvider, requestedFiles); } - if(fileAccessRequest.getState() == RequestState.CREATED){ - requestedFiles.add(fileAccessRequest); - } + requestedFiles.add(fileAccessRequest); } } } From c07e960498e6386f28e7e9528fa8474b858272fe Mon Sep 17 00:00:00 2001 From: qqmyers Date: Fri, 7 Jul 2023 14:15:23 -0400 Subject: [PATCH 0474/1525] only list unresolved (state=CREATED) access requests --- src/main/java/edu/harvard/iq/dataverse/DataFile.java | 4 ++-- src/main/java/edu/harvard/iq/dataverse/DatasetPage.java | 2 +- .../edu/harvard/iq/dataverse/FileDownloadServiceBean.java | 6 ++---- src/main/java/edu/harvard/iq/dataverse/api/Access.java | 2 +- src/main/webapp/file-download-button-fragment.xhtml | 8 ++++---- src/main/webapp/filesFragment.xhtml | 2 +- 6 files changed, 11 insertions(+), 13 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/DataFile.java b/src/main/java/edu/harvard/iq/dataverse/DataFile.java index daeaf4f55c0..4f91e17256c 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DataFile.java +++ b/src/main/java/edu/harvard/iq/dataverse/DataFile.java @@ -821,12 +821,12 @@ public boolean removeFileAccessRequest(FileAccessRequest request) { return false; } - public boolean containsFileAccessRequestFromUser(RoleAssignee roleAssignee) { + public boolean containsActiveFileAccessRequestFromUser(RoleAssignee roleAssignee) { if (this.fileAccessRequests == null) { return false; } - Set existingUsers = this.fileAccessRequests.stream() + Set existingUsers = getFileAccessRequests(FileAccessRequest.RequestState.CREATED).stream() .map(FileAccessRequest::getRequester) .collect(Collectors.toSet()); diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java b/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java index b8020404e91..6ce03ad3a51 100644 --- 
a/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java @@ -5151,7 +5151,7 @@ public boolean isFileAccessRequestMultiButtonRequired(){ for (FileMetadata fmd : workingVersion.getFileMetadatas()){ AuthenticatedUser authenticatedUser = (AuthenticatedUser) session.getUser(); //Change here so that if all restricted files have pending requests there's no Request Button - if ((!this.fileDownloadHelper.canDownloadFile(fmd) && !fmd.getDataFile().containsFileAccessRequestFromUser(authenticatedUser))) { + if ((!this.fileDownloadHelper.canDownloadFile(fmd) && !fmd.getDataFile().containsActiveFileAccessRequestFromUser(authenticatedUser))) { return true; } } diff --git a/src/main/java/edu/harvard/iq/dataverse/FileDownloadServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/FileDownloadServiceBean.java index f7612300eaf..6341f00d00c 100644 --- a/src/main/java/edu/harvard/iq/dataverse/FileDownloadServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/FileDownloadServiceBean.java @@ -535,7 +535,7 @@ public boolean requestAccess(Long fileId) { return false; } DataFile file = datafileService.find(fileId); - if (!file.containsFileAccessRequestFromUser(session.getUser())) { + if (!file.containsActiveFileAccessRequestFromUser(session.getUser())) { try { commandEngine.submit(new RequestAccessCommand(dvRequestService.getDataverseRequest(), file)); return true; @@ -553,9 +553,7 @@ public boolean requestAccess(DataFile dataFile, GuestbookResponse gbr){ return accessRequested; } - List fARs = dataFile.getFileAccessRequesters(); - - if(fARs.isEmpty() || (!fARs.isEmpty() && !fARs.contains((AuthenticatedUser)session.getUser()))){ + if(!dataFile.containsActiveFileAccessRequestFromUser(session.getUser())) { try { commandEngine.submit(new RequestAccessCommand(dvRequestService.getDataverseRequest(), dataFile, gbr)); accessRequested = true; diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Access.java b/src/main/java/edu/harvard/iq/dataverse/api/Access.java index 687b859c936..70b1bdcdd2f 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Access.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Access.java @@ -1428,7 +1428,7 @@ public Response requestFileAccess(@Context ContainerRequestContext crc, @PathPar return error(BAD_REQUEST, BundleUtil.getStringFromBundle("access.api.requestAccess.failure.invalidRequest")); } - if (dataFile.containsFileAccessRequestFromUser(requestor)) { + if (dataFile.containsActiveFileAccessRequestFromUser(requestor)) { return error(BAD_REQUEST, BundleUtil.getStringFromBundle("access.api.requestAccess.failure.requestExists")); } diff --git a/src/main/webapp/file-download-button-fragment.xhtml b/src/main/webapp/file-download-button-fragment.xhtml index a0b79671ae2..5c29cd6b041 100644 --- a/src/main/webapp/file-download-button-fragment.xhtml +++ b/src/main/webapp/file-download-button-fragment.xhtml @@ -24,15 +24,15 @@ - - + disabled="#{fileMetadata.dataFile.containsActiveFileAccessRequestFromUser(dataverseSession.user)}"> - #{fileMetadata.dataFile.containsFileAccessRequestFromUser(dataverseSession.user) ? bundle['file.accessRequested'] : bundle['file.requestAccess']} + #{fileMetadata.dataFile.containsActiveFileAccessRequestFromUser(dataverseSession.user) ? bundle['file.accessRequested'] : bundle['file.requestAccess']}
    #{bundle['file.accessRequested']}  From 60165813192a3582557db840aa941d93217b8494 Mon Sep 17 00:00:00 2001 From: qqmyers Date: Fri, 7 Jul 2023 14:19:19 -0400 Subject: [PATCH 0475/1525] fix bug from #9257 which changed return type to FAR from DataFile seen in the Edit Permissions/File dialog when clicking the link with the # of files listed. In that dialog, things explode trying to get the directoryLabel of a FileAccessRequest (seen in log) and the popup dialog incorrectly shows the input to select a user, only has a grant button (no reject), and doesn't work (the last could be an issue with this branch only). --- .../dataverse/ManageFilePermissionsPage.java | 21 ++++++++++++++----- .../webapp/permissions-manage-files.xhtml | 2 +- 2 files changed, 17 insertions(+), 6 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/ManageFilePermissionsPage.java b/src/main/java/edu/harvard/iq/dataverse/ManageFilePermissionsPage.java index ddb853a3db9..e8e9ae9f167 100644 --- a/src/main/java/edu/harvard/iq/dataverse/ManageFilePermissionsPage.java +++ b/src/main/java/edu/harvard/iq/dataverse/ManageFilePermissionsPage.java @@ -29,6 +29,8 @@ import java.util.*; import java.util.logging.Level; import java.util.logging.Logger; +import java.util.stream.Collectors; + import javax.ejb.EJB; import javax.faces.application.FacesMessage; import javax.faces.event.ActionEvent; @@ -89,6 +91,15 @@ public class ManageFilePermissionsPage implements java.io.Serializable { public TreeMap> getFileAccessRequestMap() { return fileAccessRequestMap; } + + public List getDataFilesForRequestor() { + List fars = fileAccessRequestMap.get(getFileRequester()); + if (fars == null) { + return new ArrayList<>(); + } else { + return fars.stream().map(FileAccessRequest::getDataFile).collect(Collectors.toList()); + } + } private final TreeMap> fileAccessRequestMap = new TreeMap<>(); private boolean showDeleted = true; @@ -182,13 +193,13 @@ private void initMaps() { // populate the file access requests map for (FileAccessRequest fileAccessRequest : file.getFileAccessRequests(FileAccessRequest.RequestState.CREATED)) { - List requestedFiles = fileAccessRequestMap.get(fileAccessRequest.getRequester()); - if (requestedFiles == null) { - requestedFiles = new ArrayList<>(); + List fileAccessRequestList = fileAccessRequestMap.get(fileAccessRequest.getRequester()); + if (fileAccessRequestList == null) { + fileAccessRequestList = new ArrayList<>(); AuthenticatedUser withProvider = authenticationService.getAuthenticatedUserWithProvider(fileAccessRequest.getRequester().getUserIdentifier()); - fileAccessRequestMap.put(withProvider, requestedFiles); + fileAccessRequestMap.put(withProvider, fileAccessRequestList); } - requestedFiles.add(fileAccessRequest); + fileAccessRequestList.add(fileAccessRequest); } } } diff --git a/src/main/webapp/permissions-manage-files.xhtml b/src/main/webapp/permissions-manage-files.xhtml index d3109da69a6..4e4e56f2051 100644 --- a/src/main/webapp/permissions-manage-files.xhtml +++ b/src/main/webapp/permissions-manage-files.xhtml @@ -322,7 +322,7 @@ From 0720c93fd7b3e1728824a24e36c3e27a8db65509 Mon Sep 17 00:00:00 2001 From: qqmyers Date: Fri, 7 Jul 2023 15:03:02 -0400 Subject: [PATCH 0476/1525] fix grant in popup from # files link in Files column --- .../java/edu/harvard/iq/dataverse/DataFile.java | 2 +- .../iq/dataverse/ManageFilePermissionsPage.java | 14 ++++---------- 2 files changed, 5 insertions(+), 11 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/DataFile.java 
b/src/main/java/edu/harvard/iq/dataverse/DataFile.java index 4f91e17256c..7b59487d3f3 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DataFile.java +++ b/src/main/java/edu/harvard/iq/dataverse/DataFile.java @@ -804,7 +804,7 @@ public FileAccessRequest getAccessRequestForAssignee(RoleAssignee roleAssignee) } return this.fileAccessRequests.stream() - .filter(fileAccessRequest -> fileAccessRequest.getRequester().equals(roleAssignee)).findFirst() + .filter(fileAccessRequest -> fileAccessRequest.getRequester().equals(roleAssignee) && fileAccessRequest.isStateCreated()).findFirst() .orElse(null); } diff --git a/src/main/java/edu/harvard/iq/dataverse/ManageFilePermissionsPage.java b/src/main/java/edu/harvard/iq/dataverse/ManageFilePermissionsPage.java index e8e9ae9f167..4796d0956c6 100644 --- a/src/main/java/edu/harvard/iq/dataverse/ManageFilePermissionsPage.java +++ b/src/main/java/edu/harvard/iq/dataverse/ManageFilePermissionsPage.java @@ -421,18 +421,12 @@ public void grantAccess(ActionEvent evt) { if (file.isReleased()) { sendNotification = true; } - // remove request, if it exist + // set request(s) granted, if they exist for (AuthenticatedUser au : roleAssigneeService.getExplicitUsers(roleAssignee)) { - if (file.getFileAccessRequesters().remove(au)) { - List fileAccessRequests = fileAccessRequestService.findAllByAuthenticatedUserIdAndRequestState(au.getId(), FileAccessRequest.RequestState.CREATED); - for(FileAccessRequest far : fileAccessRequests){ - far.setStateGranted(); - fileAccessRequestService.save(far); - } - file.setFileAccessRequests(fileAccessRequests); - datafileService.save(file); - } + FileAccessRequest far = file.getAccessRequestForAssignee(au); + far.setStateGranted(); } + datafileService.save(file); } } From 267a5ccb7bd4c9691a61bddb48e60e8067550656 Mon Sep 17 00:00:00 2001 From: qqmyers Date: Fri, 7 Jul 2023 15:03:53 -0400 Subject: [PATCH 0477/1525] only require one CREATED request per datafile, auth user and allow multiple granted/rejected ones --- src/main/java/edu/harvard/iq/dataverse/FileAccessRequest.java | 4 +--- .../db/migration/V5.13.0.3__guestbook-on-request.sql | 2 ++ 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/FileAccessRequest.java b/src/main/java/edu/harvard/iq/dataverse/FileAccessRequest.java index e76a3414b52..a9be3ecac5e 100644 --- a/src/main/java/edu/harvard/iq/dataverse/FileAccessRequest.java +++ b/src/main/java/edu/harvard/iq/dataverse/FileAccessRequest.java @@ -27,9 +27,7 @@ */ @Entity -@Table(name = "fileaccessrequests", //having added the guestbookresponse_id column to fileaccessrequests - uniqueConstraints=@UniqueConstraint(columnNames={"datafile_id", "authenticated_user_id","request_state"}) //this may not make sense at some future point -) +@Table(name = "fileaccessrequests") @NamedQueries({ @NamedQuery(name = "FileAccessRequest.findByAuthenticatedUserId", diff --git a/src/main/resources/db/migration/V5.13.0.3__guestbook-on-request.sql b/src/main/resources/db/migration/V5.13.0.3__guestbook-on-request.sql index 20bc1fc1c76..166fb9554c6 100644 --- a/src/main/resources/db/migration/V5.13.0.3__guestbook-on-request.sql +++ b/src/main/resources/db/migration/V5.13.0.3__guestbook-on-request.sql @@ -5,3 +5,5 @@ ALTER TABLE fileaccessrequests ADD CONSTRAINT fileaccessrequests_pkey PRIMARY KE ALTER TABLE fileaccessrequests ADD COLUMN IF NOT EXISTS guestbookresponse_id INT; ALTER TABLE fileaccessrequests DROP CONSTRAINT IF EXISTS fk_fileaccessrequests_guestbookresponse; ALTER TABLE fileaccessrequests 
From 4ba20d606e6f595b97f5960386130f7af6acfb76 Mon Sep 17 00:00:00 2001
From: qqmyers
Date: Fri, 7 Jul 2023 15:05:42 -0400
Subject: [PATCH 0478/1525] simplify states

---
 .../iq/dataverse/FileAccessRequest.java | 47 +------------------
 1 file changed, 1 insertion(+), 46 deletions(-)

diff --git a/src/main/java/edu/harvard/iq/dataverse/FileAccessRequest.java b/src/main/java/edu/harvard/iq/dataverse/FileAccessRequest.java
index a9be3ecac5e..ddde8049dff 100644
--- a/src/main/java/edu/harvard/iq/dataverse/FileAccessRequest.java
+++ b/src/main/java/edu/harvard/iq/dataverse/FileAccessRequest.java
@@ -67,7 +67,7 @@ public class FileAccessRequest implements Serializable{
     @JoinColumn(nullable=true)
     private GuestbookResponse guestbookResponse;
 
-    public enum RequestState {CREATED,EDITED,GRANTED,REJECTED,RESUBMIT,INVALIDATED,CLOSED};
+    public enum RequestState {CREATED, GRANTED, REJECTED};
     //private RequestState state;
     @Enumerated(EnumType.STRING)
     @Column(name="request_state", nullable=false )
@@ -138,24 +138,12 @@ public String getStateLabel() {
         if(isStateCreated()){
             return "created";
         }
-        if(isStateEdited()) {
-            return "edited";
-        }
         if(isStateGranted()) {
             return "granted";
         }
         if(isStateRejected()) {
            return "rejected";
         }
-        if(isStateResubmit()) {
-            return "resubmit";
-        }
-        if(isStateInvalidated()) {
-            return "invalidated";
-        }
-        if(isStateClosed()) {
-            return "closed";
-        }
         return null;
     }
 
@@ -163,10 +151,6 @@ public void setStateCreated() {
         this.requestState = RequestState.CREATED;
     }
 
-    public void setStateEdited() {
-        this.requestState = RequestState.EDITED;
-    }
-
     public void setStateGranted() {
         this.requestState = RequestState.GRANTED;
     }
@@ -175,27 +159,10 @@ public void setStateRejected() {
         this.requestState = RequestState.REJECTED;
     }
 
-    public void setStateResubmit() {
-        this.requestState = RequestState.RESUBMIT;
-    }
-
-    public void setStateInvalidated() {
-        this.requestState = RequestState.INVALIDATED;
-    }
-
-    public void setStateClosed() {
-        this.requestState = RequestState.CLOSED;
-    }
-
-
     public boolean isStateCreated() {
         return this.requestState == RequestState.CREATED;
     }
 
-    public boolean isStateEdited() {
-        return this.requestState == RequestState.EDITED;
-    }
-
     public boolean isStateGranted() {
         return this.requestState == RequestState.GRANTED;
    }
@@ -203,18 +170,6 @@ public boolean isStateGranted() {
     public boolean isStateRejected() {
         return this.requestState == RequestState.REJECTED;
     }
-
-    public boolean isStateResubmit() {
-        return this.requestState == RequestState.RESUBMIT;
-    }
-
-    public boolean isStateInvalidated() {
-        return this.requestState == RequestState.INVALIDATED;
-    }
-
-    public boolean isStateClosed() {
-        return this.requestState == RequestState.CLOSED;
-    }
 
     @Override
     public int hashCode() {
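With EDITED, RESUBMIT, INVALIDATED, and CLOSED gone, the lifecycle collapses to a single decision: a request starts CREATED and ends either GRANTED or REJECTED. A standalone sketch of that state machine; the decide() helper is illustrative only, not part of the entity:

    public enum RequestState {
        CREATED, GRANTED, REJECTED;

        // Illustrative guard: only an open request may be decided, and a
        // decision is final -- there is no path back to CREATED.
        public RequestState decide(boolean granted) {
            if (this != CREATED) {
                throw new IllegalStateException("request already " + this);
            }
            return granted ? GRANTED : REJECTED;
        }
    }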
From e1e075cf7961d922d35c89eb9f719fa0451ba56d Mon Sep 17 00:00:00 2001
From: qqmyers
Date: Fri, 7 Jul 2023 16:25:03 -0400
Subject: [PATCH 0479/1525] typo in comment

---
 src/main/java/edu/harvard/iq/dataverse/FileDownloadHelper.java | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/src/main/java/edu/harvard/iq/dataverse/FileDownloadHelper.java b/src/main/java/edu/harvard/iq/dataverse/FileDownloadHelper.java
index ce2539758ab..7ec8272ac5b 100644
--- a/src/main/java/edu/harvard/iq/dataverse/FileDownloadHelper.java
+++ b/src/main/java/edu/harvard/iq/dataverse/FileDownloadHelper.java
@@ -310,7 +310,7 @@ public void requestAccessMultiple(List<DataFile> files) {
         DataFile notificationFile = null;
         for (DataFile file : files) {
             //Not sending notification via request method so that
-            // we can bundle them up into one nofication at dataset level
+            // we can bundle them up into one notification at dataset level
             test = processRequestAccess(file, false);
             succeeded |= test;
             if (notificationFile == null) {

From f980d7ba1f3ad395ffbb21a27a04e2e41433a7f7 Mon Sep 17 00:00:00 2001
From: qqmyers
Date: Fri, 7 Jul 2023 16:25:23 -0400
Subject: [PATCH 0480/1525] Fix idempotence

---
 .../resources/db/migration/V5.13.0.3__guestbook-on-request.sql | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/src/main/resources/db/migration/V5.13.0.3__guestbook-on-request.sql b/src/main/resources/db/migration/V5.13.0.3__guestbook-on-request.sql
index 166fb9554c6..a92d8910f6d 100644
--- a/src/main/resources/db/migration/V5.13.0.3__guestbook-on-request.sql
+++ b/src/main/resources/db/migration/V5.13.0.3__guestbook-on-request.sql
@@ -5,5 +5,5 @@ ALTER TABLE fileaccessrequests ADD CONSTRAINT fileaccessrequests_pkey PRIMARY KE
 ALTER TABLE fileaccessrequests ADD COLUMN IF NOT EXISTS guestbookresponse_id INT;
 ALTER TABLE fileaccessrequests DROP CONSTRAINT IF EXISTS fk_fileaccessrequests_guestbookresponse;
 ALTER TABLE fileaccessrequests ADD CONSTRAINT fk_fileaccessrequests_guestbookresponse FOREIGN KEY (guestbookresponse_id) REFERENCES guestbookresponse(id);
-ALTER TABLE fileaccessrequests DROP CONSTRAINT IF EXISTS created_requests;
+DROP INDEX IF EXISTS created_requests;
 CREATE UNIQUE INDEX created_requests ON fileaccessrequests (datafile_id, authenticated_user_id) WHERE request_state='CREATED';
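The idempotence bug fixed above is easy to miss: created_requests was made with CREATE UNIQUE INDEX, so it is an index, not a table constraint, and ALTER TABLE ... DROP CONSTRAINT IF EXISTS silently never removes it. On a re-run the script then fails at CREATE UNIQUE INDEX because the relation already exists. The guard has to match the kind of object being created:

    -- Safe to run repeatedly: DROP INDEX matches what CREATE UNIQUE INDEX made.
    DROP INDEX IF EXISTS created_requests;
    CREATE UNIQUE INDEX created_requests
        ON fileaccessrequests (datafile_id, authenticated_user_id)
        WHERE request_state = 'CREATED';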
From 6a0b0d0551926d56e1c80b9e1d9681984a13a98d Mon Sep 17 00:00:00 2001
From: qqmyers
Date: Fri, 7 Jul 2023 16:26:04 -0400
Subject: [PATCH 0481/1525] ignore prior granted/rejected requests; fixes the
 multi-file Request Access button on the dataset page

---
 .../iq/dataverse/authorization/users/AuthenticatedUser.java | 4 +++-
 1 file changed, 3 insertions(+), 1 deletion(-)

diff --git a/src/main/java/edu/harvard/iq/dataverse/authorization/users/AuthenticatedUser.java b/src/main/java/edu/harvard/iq/dataverse/authorization/users/AuthenticatedUser.java
index bb688fb8acb..3a7bbaf5af9 100644
--- a/src/main/java/edu/harvard/iq/dataverse/authorization/users/AuthenticatedUser.java
+++ b/src/main/java/edu/harvard/iq/dataverse/authorization/users/AuthenticatedUser.java
@@ -222,7 +222,9 @@ public List<DataFile> getRequestedDataFiles(){
         List<DataFile> requestedDataFiles = new ArrayList<>();
 
         for(FileAccessRequest far : getFileAccessRequests()){
-            requestedDataFiles.add(far.getDataFile());
+            if(far.isStateCreated()) {
+                requestedDataFiles.add(far.getDataFile());
+            }
         }
         return requestedDataFiles;
     }
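For reference, the patched loop above is equivalent to a stream filter over the same getters; a sketch only, not part of the commit, and it assumes the usual java.util.stream imports:

    public List<DataFile> getRequestedDataFiles() {
        // Only requests still open (CREATED) count as pending.
        return getFileAccessRequests().stream()
                .filter(FileAccessRequest::isStateCreated)
                .map(FileAccessRequest::getDataFile)
                .collect(Collectors.toList());
    }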
From 9f35bf7843f1dc810e77217aec7bc1fe5dab17ad Mon Sep 17 00:00:00 2001
From: GPortas
Date: Sun, 9 Jul 2023 15:35:49 +0100
Subject: [PATCH 0482/1525] Added: naming refactor and handling of not-found
 files in new files API endpoints

---
 .../iq/dataverse/FileDownloadServiceBean.java |  1 -
 .../edu/harvard/iq/dataverse/api/Files.java   | 21 ++++++++++++-------
 .../edu/harvard/iq/dataverse/api/FilesIT.java | 20 +++++++++---------
 .../edu/harvard/iq/dataverse/api/UtilIT.java  |  8 ++-----
 4 files changed, 26 insertions(+), 24 deletions(-)

diff --git a/src/main/java/edu/harvard/iq/dataverse/FileDownloadServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/FileDownloadServiceBean.java
index 5c490b18ecc..f5bb60510cc 100644
--- a/src/main/java/edu/harvard/iq/dataverse/FileDownloadServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/FileDownloadServiceBean.java
@@ -574,7 +574,6 @@ public String getDirectStorageLocatrion(String storageLocation) {
     }
 
     /**
-     *
      * Checks if a user can download a file based on the file metadata and the permissions of the user
      *
     * This method is based on {@link edu.harvard.iq.dataverse.FileDownloadHelper#canDownloadFile(FileMetadata),
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Files.java b/src/main/java/edu/harvard/iq/dataverse/api/Files.java
index 341fb94e086..d4bf28482c9 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/Files.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/Files.java
@@ -106,7 +106,6 @@ public class Files extends AbstractApiBean {
     MakeDataCountLoggingServiceBean mdcLogService;
     @Inject
     GuestbookResponseServiceBean guestbookResponseService;
-
     @Inject
     FileDownloadServiceBean fileDownloadServiceBean;
 
@@ -828,18 +827,26 @@ public Response getFixityAlgorithm() {
 
     @GET
     @Path("{id}/guestbookResponses/count")
-    public Response getCountGuestbookResponsesByDataFileId(@PathParam("id") long dataFileId) {
+    public Response getCountGuestbookResponses(@PathParam("id") String dataFileId) {
+        DataFile dataFile;
         try {
-            return ok(guestbookResponseService.getCountGuestbookResponsesByDataFileId(dataFileId).toString());
-        } catch (NumberFormatException nfe) {
-            return badRequest("File identifier has to be numeric.");
+            dataFile = findDataFileOrDie(dataFileId);
+        } catch (WrappedResponse wr) {
+            return wr.getResponse();
         }
+        return ok(guestbookResponseService.getCountGuestbookResponsesByDataFileId(dataFile.getId()).toString());
     }
 
     @GET
     @AuthRequired
     @Path("{id}/canBeDownloaded")
-    public Response canDataFileBeDownloaded(@Context ContainerRequestContext crc, @PathParam("id") long dataFileId) {
-        return ok(fileDownloadServiceBean.canDownloadFile(getRequestUser(crc), fileSvc.find(dataFileId).getFileMetadata()));
+    public Response canFileBeDownloaded(@Context ContainerRequestContext crc, @PathParam("id") String dataFileId) {
+        DataFile dataFile;
+        try {
+            dataFile = findDataFileOrDie(dataFileId);
+        } catch (WrappedResponse wr) {
+            return wr.getResponse();
+        }
+        return ok(fileDownloadServiceBean.canDownloadFile(getRequestUser(crc), dataFile.getFileMetadata()));
     }
 }
diff --git a/src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java b/src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java
index 033ebe25062..8cdc52be1d0 100644
--- a/src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java
+++ b/src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java
@@ -2020,7 +2020,7 @@ public void testDeleteFile() {
     }
 
     @Test
-    public void testGetCountGuestbookResponsesByDataFileId() {
+    public void testGetCountGuestbookResponses() {
         Response createUser = UtilIT.createRandomUser();
         createUser.then().assertThat().statusCode(OK.getStatusCode());
         String apiToken = UtilIT.getApiTokenFromResponse(createUser);
@@ -2043,20 +2043,20 @@
         UtilIT.publishDatasetViaNativeApi(datasetId, "major", apiToken).then().assertThat().statusCode(OK.getStatusCode());
 
         // Download test file
-        Integer testFileId = JsonPath.from(uploadResponse.body().asString()).getInt("data.files[0].dataFile.id");
+        int testFileId = JsonPath.from(uploadResponse.body().asString()).getInt("data.files[0].dataFile.id");
         Response downloadResponse = UtilIT.downloadFile(testFileId, apiToken);
         downloadResponse.then().assertThat().statusCode(OK.getStatusCode());
 
         // Get count guestbook responses and assert it is 1
-        Response getGuestbookResponsesByDataFileIdResponse = UtilIT.getCountGuestbookResponsesByDataFileId(testFileId, apiToken);
-        getGuestbookResponsesByDataFileIdResponse.then().assertThat()
+        Response getCountGuestbookResponsesResponse = UtilIT.getCountGuestbookResponses(testFileId, apiToken);
+        getCountGuestbookResponsesResponse.then().assertThat()
                 .statusCode(OK.getStatusCode())
                 .body("data.message", equalTo("1"));
     }
 
     @Test
-    public void testCanDataFileBeDownloaded() {
+    public void testCanFileBeDownloaded() {
         Response createUser = UtilIT.createRandomUser();
         createUser.then().assertThat().statusCode(OK.getStatusCode());
         String apiToken = UtilIT.getApiTokenFromResponse(createUser);
@@ -2079,11 +2079,11 @@
         UtilIT.publishDatasetViaNativeApi(datasetId, "major", apiToken).then().assertThat().statusCode(OK.getStatusCode());
 
         // Assert user can download test file
-        Integer testFileId = JsonPath.from(uploadResponse.body().asString()).getInt("data.files[0].dataFile.id");
-        Response canDataFileBeDownloadedResponse = UtilIT.canDataFileBeDownloaded(testFileId, apiToken);
+        int testFileId = JsonPath.from(uploadResponse.body().asString()).getInt("data.files[0].dataFile.id");
+        Response canFileBeDownloadedResponse = UtilIT.canFileBeDownloaded(testFileId, apiToken);
 
-        canDataFileBeDownloadedResponse.then().assertThat().statusCode(OK.getStatusCode());
-        boolean canDownloadTestFile = JsonPath.from(canDataFileBeDownloadedResponse.body().asString()).getBoolean("data");
-        assertTrue(canDownloadTestFile);
+        canFileBeDownloadedResponse.then().assertThat().statusCode(OK.getStatusCode());
+        boolean canFileBeDownloaded = JsonPath.from(canFileBeDownloadedResponse.body().asString()).getBoolean("data");
+        assertTrue(canFileBeDownloaded);
     }
 }
diff --git a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java
index 4745cc7d2eb..4f2ab3146ef 100644
--- a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java
+++ b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java
@@ -1,6 +1,5 @@
 package edu.harvard.iq.dataverse.api;
 
-import com.jayway.restassured.RestAssured;
 import com.jayway.restassured.http.ContentType;
 import com.jayway.restassured.path.json.JsonPath;
 import com.jayway.restassured.response.Response;
@@ -12,8 +11,6 @@
 import javax.json.JsonObjectBuilder;
 import javax.json.JsonArrayBuilder;
 import javax.json.JsonObject;
-import javax.ws.rs.client.Client;
-import javax.ws.rs.client.ClientBuilder;
 import java.nio.charset.StandardCharsets;
 import java.nio.file.Files;
 import java.nio.file.Paths;
@@ -44,7 +41,6 @@
 import static com.jayway.restassured.path.xml.XmlPath.from;
 import static com.jayway.restassured.RestAssured.given;
 import edu.harvard.iq.dataverse.DatasetField;
-import edu.harvard.iq.dataverse.DatasetFieldConstant;
 import edu.harvard.iq.dataverse.DatasetFieldType;
 import edu.harvard.iq.dataverse.DatasetFieldValue;
 import edu.harvard.iq.dataverse.util.StringUtil;
@@ -3270,13 +3266,13 @@ static Response createAndUploadTestFile(String persistentId, String testFileName
         return uploadZipFileViaSword(persistentId, pathToTestFile, apiToken);
     }
 
-    static Response getCountGuestbookResponsesByDataFileId(Integer dataFileId, String apiToken) {
+    static Response getCountGuestbookResponses(int dataFileId, String apiToken) {
         return given()
                 .header(API_TOKEN_HTTP_HEADER, apiToken)
                 .get("/api/files/" + dataFileId + "/guestbookResponses/count");
     }
 
-    static Response canDataFileBeDownloaded(Integer dataFileId, String apiToken) {
+    static Response canFileBeDownloaded(int dataFileId, String apiToken) {
         return given()
                 .header(API_TOKEN_HTTP_HEADER, apiToken)
                 .get("/api/files/" + dataFileId + "/canBeDownloaded");
    }
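A note on the assertions in these tests: ok(...) wraps its payload in the standard Dataverse JSON envelope, which is why the count test reads data.message rather than the raw body. For a file with one download, the count endpoint's response body looks essentially like this:

    {
        "status": "OK",
        "data": {
            "message": "1"
        }
    }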
From a2bc4d4b1d98cb103706c49e1c357a3407b6c23b Mon Sep 17 00:00:00 2001
From: GPortas
Date: Sun, 9 Jul 2023 15:47:08 +0100
Subject: [PATCH 0483/1525] Removed: non-essential findDataFileOrDie call to
 avoid an extra query

---
 src/main/java/edu/harvard/iq/dataverse/api/Files.java | 10 ++--------
 1 file changed, 2 insertions(+), 8 deletions(-)

diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Files.java b/src/main/java/edu/harvard/iq/dataverse/api/Files.java
index d4bf28482c9..714a08f4d2c 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/Files.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/Files.java
@@ -827,14 +827,8 @@ public Response getFixityAlgorithm() {
 
     @GET
     @Path("{id}/guestbookResponses/count")
-    public Response getCountGuestbookResponses(@PathParam("id") String dataFileId) {
-        DataFile dataFile;
-        try {
-            dataFile = findDataFileOrDie(dataFileId);
-        } catch (WrappedResponse wr) {
-            return wr.getResponse();
-        }
-        return ok(guestbookResponseService.getCountGuestbookResponsesByDataFileId(dataFile.getId()).toString());
+    public Response getCountGuestbookResponses(@PathParam("id") long dataFileId) {
+        return ok(guestbookResponseService.getCountGuestbookResponsesByDataFileId(dataFileId).toString());
     }
 
     @GET
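The trade-off in this removal is worth spelling out: passing the numeric id straight to the count query saves one database round-trip, but it also gives up the existence check, so an id matching no file presumably just counts zero responses instead of producing an error; patch 0485 below reverses this call for exactly that reason. The two shapes side by side (comments are annotations, not part of either commit):

    // Shape A (this patch): fastest, no existence check -- an unknown id
    // presumably yields "0" (an inference from the count query's semantics).
    return ok(guestbookResponseService.getCountGuestbookResponsesByDataFileId(dataFileId).toString());

    // Shape B (restored in patch 0485): one extra lookup, but bad or unknown
    // identifiers surface as proper 4xx responses via findDataFileOrDie.
    DataFile dataFile = findDataFileOrDie(dataFileId);  // throws WrappedResponse
    return ok(guestbookResponseService.getCountGuestbookResponsesByDataFileId(dataFile.getId()).toString());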
From ec755348740e33b30f4909e78eee2e9f3c63f8a9 Mon Sep 17 00:00:00 2001
From: GPortas
Date: Mon, 10 Jul 2023 09:11:22 +0100
Subject: [PATCH 0484/1525] Added: getFileThumbnailClass API endpoint and
 enhanced test coverage for new endpoints

---
 .../edu/harvard/iq/dataverse/api/Files.java   | 15 ++++
 .../edu/harvard/iq/dataverse/api/FilesIT.java | 51 ++++++++++++++++---
 .../edu/harvard/iq/dataverse/api/UtilIT.java  |  8 ++-
 3 files changed, 65 insertions(+), 9 deletions(-)

diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Files.java b/src/main/java/edu/harvard/iq/dataverse/api/Files.java
index 714a08f4d2c..2f5699d28ca 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/Files.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/Files.java
@@ -3,6 +3,7 @@
 import com.google.gson.Gson;
 import com.google.gson.JsonObject;
 import edu.harvard.iq.dataverse.DataFile;
+import edu.harvard.iq.dataverse.DataFileServiceBean;
 import edu.harvard.iq.dataverse.Dataset;
 import edu.harvard.iq.dataverse.DatasetLock;
 import edu.harvard.iq.dataverse.DatasetServiceBean;
@@ -108,6 +109,8 @@ public class Files extends AbstractApiBean {
     GuestbookResponseServiceBean guestbookResponseService;
     @Inject
     FileDownloadServiceBean fileDownloadServiceBean;
+    @Inject
+    DataFileServiceBean dataFileServiceBean;
 
     private static final Logger logger = Logger.getLogger(Files.class.getName());
 
@@ -843,4 +846,16 @@ public Response canFileBeDownloaded(@Context ContainerRequestContext crc, @PathP
         }
         return ok(fileDownloadServiceBean.canDownloadFile(getRequestUser(crc), dataFile.getFileMetadata()));
     }
+
+    @GET
+    @Path("{id}/thumbnailClass")
+    public Response getFileThumbnailClass(@PathParam("id") String dataFileId) {
+        DataFile dataFile;
+        try {
+            dataFile = findDataFileOrDie(dataFileId);
+        } catch (WrappedResponse wr) {
+            return wr.getResponse();
+        }
+        return ok(dataFileServiceBean.getFileThumbnailClass(dataFile));
+    }
 }
diff --git a/src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java b/src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java
index 8cdc52be1d0..b3b96dc5234 100644
--- a/src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java
+++ b/src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java
@@ -2020,7 +2020,7 @@ public void testDeleteFile() {
     }
 
     @Test
-    public void testGetCountGuestbookResponses() {
+    public void testGetCountGuestbookResponses() throws InterruptedException {
         Response createUser = UtilIT.createRandomUser();
         createUser.then().assertThat().statusCode(OK.getStatusCode());
         String apiToken = UtilIT.getApiTokenFromResponse(createUser);
@@ -2048,6 +2048,9 @@
         Response downloadResponse = UtilIT.downloadFile(testFileId, apiToken);
         downloadResponse.then().assertThat().statusCode(OK.getStatusCode());
 
+        // Ensure guestbook is updated
+        sleep(2000);
+
         // Get count guestbook responses and assert it is 1
         Response getCountGuestbookResponsesResponse = UtilIT.getCountGuestbookResponses(testFileId, apiToken);
         getCountGuestbookResponsesResponse.then().assertThat()
@@ -2067,23 +2070,55 @@
         Response createDatasetResponse = UtilIT.createRandomDatasetViaNativeApi(dataverseAlias, apiToken);
         createDatasetResponse.then().assertThat().statusCode(CREATED.getStatusCode());
-        Integer datasetId = JsonPath.from(createDatasetResponse.body().asString()).getInt("data.id");
+        int datasetId = JsonPath.from(createDatasetResponse.body().asString()).getInt("data.id");
 
         // Upload test file
         String pathToTestFile = "src/main/webapp/resources/images/dataverseproject.png";
-        Response uploadResponse = UtilIT.uploadFileViaNative(datasetId.toString(), pathToTestFile, Json.createObjectBuilder().build(), apiToken);
+        Response uploadResponse = UtilIT.uploadFileViaNative(Integer.toString(datasetId), pathToTestFile, Json.createObjectBuilder().build(), apiToken);
         uploadResponse.then().assertThat().statusCode(OK.getStatusCode());
 
-        // Publish collection and dataset
-        UtilIT.publishDataverseViaNativeApi(dataverseAlias, apiToken).then().assertThat().statusCode(OK.getStatusCode());
-        UtilIT.publishDatasetViaNativeApi(datasetId, "major", apiToken).then().assertThat().statusCode(OK.getStatusCode());
-
         // Assert user can download test file
         int testFileId = JsonPath.from(uploadResponse.body().asString()).getInt("data.files[0].dataFile.id");
-        Response canFileBeDownloadedResponse = UtilIT.canFileBeDownloaded(testFileId, apiToken);
+        Response canFileBeDownloadedResponse = UtilIT.canFileBeDownloaded(Integer.toString(testFileId), apiToken);
 
         canFileBeDownloadedResponse.then().assertThat().statusCode(OK.getStatusCode());
         boolean canFileBeDownloaded = JsonPath.from(canFileBeDownloadedResponse.body().asString()).getBoolean("data");
         assertTrue(canFileBeDownloaded);
+
+        // Call with invalid file id
+        Response canFileBeDownloadedInvalidIdResponse = UtilIT.canFileBeDownloaded("testInvalidId", apiToken);
+        canFileBeDownloadedInvalidIdResponse.then().assertThat().statusCode(BAD_REQUEST.getStatusCode());
+    }
+
+    @Test
+    public void testGetFileThumbnailClass() {
+        Response createUser = UtilIT.createRandomUser();
+        createUser.then().assertThat().statusCode(OK.getStatusCode());
+        String apiToken = UtilIT.getApiTokenFromResponse(createUser);
+
+        Response createDataverseResponse = UtilIT.createRandomDataverse(apiToken);
+        createDataverseResponse.then().assertThat().statusCode(CREATED.getStatusCode());
+        String dataverseAlias = UtilIT.getAliasFromResponse(createDataverseResponse);
+
+        Response createDatasetResponse = UtilIT.createRandomDatasetViaNativeApi(dataverseAlias, apiToken);
+        createDatasetResponse.then().assertThat().statusCode(CREATED.getStatusCode());
+        int datasetId = JsonPath.from(createDatasetResponse.body().asString()).getInt("data.id");
+
+        // Upload test file
+        String pathToTestFile = "src/main/webapp/resources/images/dataverseproject.png";
+        Response uploadResponse = UtilIT.uploadFileViaNative(Integer.toString(datasetId), pathToTestFile, Json.createObjectBuilder().build(), apiToken);
+        uploadResponse.then().assertThat().statusCode(OK.getStatusCode());
+
+        // Get file thumbnail class and assert is image
+        int testFileId = JsonPath.from(uploadResponse.body().asString()).getInt("data.files[0].dataFile.id");
+        Response getFileThumbnailClassResponse = UtilIT.getFileThumbnailClass(Integer.toString(testFileId), apiToken);
+
+        getFileThumbnailClassResponse.then().assertThat().statusCode(OK.getStatusCode());
+        String fileThumbnailClass = JsonPath.from(getFileThumbnailClassResponse.body().asString()).getString("data.message");
+        assertEquals("image", fileThumbnailClass);
+
+        // Call with invalid file id
+        Response getFileThumbnailClassInvalidIdResponse = UtilIT.getFileThumbnailClass("testInvalidId", apiToken);
+        getFileThumbnailClassInvalidIdResponse.then().assertThat().statusCode(BAD_REQUEST.getStatusCode());
    }
 }
diff --git a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java
index 4f2ab3146ef..502a01f0d32 100644
--- a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java
+++ b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java
@@ -3272,9 +3272,15 @@ static Response getCountGuestbookResponses(int dataFileId, String apiToken) {
                 .get("/api/files/" + dataFileId + "/guestbookResponses/count");
     }
 
-    static Response canFileBeDownloaded(int dataFileId, String apiToken) {
+    static Response canFileBeDownloaded(String dataFileId, String apiToken) {
         return given()
                 .header(API_TOKEN_HTTP_HEADER, apiToken)
                 .get("/api/files/" + dataFileId + "/canBeDownloaded");
     }
+
+    static Response getFileThumbnailClass(String dataFileId, String apiToken) {
+        return given()
+                .header(API_TOKEN_HTTP_HEADER, apiToken)
+                .get("/api/files/" + dataFileId + "/thumbnailClass");
+    }
 }
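Once deployed, the new endpoint can also be exercised directly. A curl sketch, assuming the usual X-Dataverse-key token header and a hypothetical file id of 42; for an image file the tests above expect the message to be "image":

    curl -H "X-Dataverse-key:$API_TOKEN" "$SERVER_URL/api/files/42/thumbnailClass"
    # => {"status":"OK","data":{"message":"image"}}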
From 86865f5a979625e1f948c20d0618024f792e2c56 Mon Sep 17 00:00:00 2001
From: GPortas
Date: Mon, 10 Jul 2023 12:48:08 +0100
Subject: [PATCH 0485/1525] Added: PID support in getCountGuestbookResponses,
 plus parameter format and data file existence verification

---
 src/main/java/edu/harvard/iq/dataverse/api/Files.java  | 10 ++++++++--
 .../java/edu/harvard/iq/dataverse/api/FilesIT.java     |  6 +++++-
 src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java |  2 +-
 3 files changed, 14 insertions(+), 4 deletions(-)

diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Files.java b/src/main/java/edu/harvard/iq/dataverse/api/Files.java
index 2f5699d28ca..171f8aa6e87 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/Files.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/Files.java
@@ -830,8 +830,14 @@ public Response getFixityAlgorithm() {
 
     @GET
     @Path("{id}/guestbookResponses/count")
-    public Response getCountGuestbookResponses(@PathParam("id") long dataFileId) {
-        return ok(guestbookResponseService.getCountGuestbookResponsesByDataFileId(dataFileId).toString());
+    public Response getCountGuestbookResponses(@PathParam("id") String dataFileId) {
+        DataFile dataFile;
+        try {
+            dataFile = findDataFileOrDie(dataFileId);
+        } catch (WrappedResponse wr) {
+            return wr.getResponse();
+        }
+        return ok(guestbookResponseService.getCountGuestbookResponsesByDataFileId(dataFile.getId()).toString());
     }
 
     @GET
diff --git a/src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java b/src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java
index b3b96dc5234..fe2007a37e4 100644
--- a/src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java
+++ b/src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java
@@ -2052,10 +2052,14 @@ public void testGetCountGuestbookResponses() throws InterruptedException {
         sleep(2000);
 
         // Get count guestbook responses and assert it is 1
-        Response getCountGuestbookResponsesResponse = UtilIT.getCountGuestbookResponses(testFileId, apiToken);
+        Response getCountGuestbookResponsesResponse = UtilIT.getCountGuestbookResponses(Integer.toString(testFileId), apiToken);
         getCountGuestbookResponsesResponse.then().assertThat()
                 .statusCode(OK.getStatusCode())
                 .body("data.message", equalTo("1"));
+
+        // Call with invalid file id
+        Response getCountGuestbookResponsesInvalidIdResponse = UtilIT.getCountGuestbookResponses("testInvalidId", apiToken);
+        getCountGuestbookResponsesInvalidIdResponse.then().assertThat().statusCode(BAD_REQUEST.getStatusCode());
     }
 
     @Test
diff --git a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java
index 502a01f0d32..9a471648da4 100644
--- a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java
+++ b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java
@@ -3266,7 +3266,7 @@ static Response createAndUploadTestFile(String persistentId, String testFileName
         return uploadZipFileViaSword(persistentId, pathToTestFile, apiToken);
     }
 
-    static Response getCountGuestbookResponses(int dataFileId, String apiToken) {
+    static Response getCountGuestbookResponses(String dataFileId, String apiToken) {
         return given()
                 .header(API_TOKEN_HTTP_HEADER, apiToken)
                 .get("/api/files/" + dataFileId + "/guestbookResponses/count");
    }
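Routing the lookup through findDataFileOrDie is what the subject line means by PID support: the {id} path segment can now be either a database id or the :persistentId marker with the PID passed as a query parameter, following the convention used elsewhere in the native API. A hedged curl example with a hypothetical DOI:

    curl -H "X-Dataverse-key:$API_TOKEN" \
      "$SERVER_URL/api/files/:persistentId/guestbookResponses/count?persistentId=doi:10.5072/FK2/EXAMPLE"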
From 5cfcac1874a44b9c8355b3025cfecdd101472fbc Mon Sep 17 00:00:00 2001
From: qqmyers
Date: Mon, 10 Jul 2023 10:10:45 -0400
Subject: [PATCH 0486/1525] make FileDownloadHelper visible in file.xhtml

---
 src/main/java/edu/harvard/iq/dataverse/FilePage.java | 9 +++++++++
 1 file changed, 9 insertions(+)

diff --git a/src/main/java/edu/harvard/iq/dataverse/FilePage.java b/src/main/java/edu/harvard/iq/dataverse/FilePage.java
index f64a38d46d7..70faeeac815 100644
--- a/src/main/java/edu/harvard/iq/dataverse/FilePage.java
+++ b/src/main/java/edu/harvard/iq/dataverse/FilePage.java
@@ -1199,5 +1199,14 @@ public String getIngestMessage() {
     public boolean isHasPublicStore() {
         return settingsWrapper.isTrueForKey(SettingsServiceBean.Key.PublicInstall, StorageIO.isPublicStore(DataAccess.getStorageDriverFromIdentifier(file.getStorageIdentifier())));
     }
+
+    //Allows use of fileDownloadHelper in file.xhtml
+    public FileDownloadHelper getFileDownloadHelper() {
+        return fileDownloadHelper;
+    }
+
+    public void setFileDownloadHelper(FileDownloadHelper fileDownloadHelper) {
+        this.fileDownloadHelper = fileDownloadHelper;
+    }
 }
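The getter/setter pair exists only so that JSF expression language can reach the injected helper: EL resolves bean properties through public accessors, so without getFileDownloadHelper() the field is invisible to file.xhtml. A hypothetical usage from the page side, not part of this commit (canDownloadFile(FileMetadata) is the helper method referenced in the javadoc earlier in this series):

    <!-- Hypothetical: render a download control only when the helper allows it -->
    <ui:fragment rendered="#{FilePage.fileDownloadHelper.canDownloadFile(FilePage.fileMetadata)}">
        ...
    </ui:fragment>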
From 5e1693dc3ee071ee24978bab7682ad6193daabcd Mon Sep 17 00:00:00 2001
From: qqmyers
Date: Mon, 10 Jul 2023 10:57:10 -0400
Subject: [PATCH 0487/1525] don't show system blocks for edit in templates

---
 .../iq/dataverse/ManageTemplatesPage.java       |  5 ++++-
 .../java/edu/harvard/iq/dataverse/Template.java | 16 +++++++++-------
 .../edu/harvard/iq/dataverse/TemplatePage.java  |  5 ++++-
 3 files changed, 17 insertions(+), 9 deletions(-)

diff --git a/src/main/java/edu/harvard/iq/dataverse/ManageTemplatesPage.java b/src/main/java/edu/harvard/iq/dataverse/ManageTemplatesPage.java
index 4578a01e693..37ee7948a14 100644
--- a/src/main/java/edu/harvard/iq/dataverse/ManageTemplatesPage.java
+++ b/src/main/java/edu/harvard/iq/dataverse/ManageTemplatesPage.java
@@ -60,6 +60,9 @@ public class ManageTemplatesPage implements java.io.Serializable {
 
     @Inject
     LicenseServiceBean licenseServiceBean;
+
+    @Inject
+    SettingsWrapper settingsWrapper;
 
     private List