From 5b3406551b2385abe4efa1b6320243d64de54030 Mon Sep 17 00:00:00 2001 From: Eryk Kulikowski Date: Thu, 29 Sep 2022 11:47:27 +0200 Subject: [PATCH 001/396] added api-direct-upload option for storage configurations --- doc/release-notes/4.20-release-notes.md | 11 +++++++++-- doc/sphinx-guides/source/installation/config.rst | 1 + .../harvard/iq/dataverse/dataaccess/StorageIO.java | 3 ++- .../java/edu/harvard/iq/dataverse/util/FileUtil.java | 3 ++- .../edu/harvard/iq/dataverse/util/SystemConfig.java | 3 ++- 5 files changed, 16 insertions(+), 5 deletions(-) diff --git a/doc/release-notes/4.20-release-notes.md b/doc/release-notes/4.20-release-notes.md index e29953db101..8044047134f 100644 --- a/doc/release-notes/4.20-release-notes.md +++ b/doc/release-notes/4.20-release-notes.md @@ -90,10 +90,16 @@ Also note that the :MaxFileUploadSizeInBytes property has a new option to provid ### Direct S3 Upload Changes -Direct upload to S3 is enabled per store by one new jvm option: +Direct upload to S3 in UI and API is enabled per store by one new jvm option: ./asadmin create-jvm-options "\-Ddataverse.files..upload-redirect=true" - + +That option makes direct upload by default in UI, in the API you can use either: direct or through Dataverse upload. Direct upload to S3 in API only is enabled per store by this new jvm option: + + ./asadmin create-jvm-options "\-Ddataverse.files..api-direct-upload=true" + +That option leaves through Dataverse upload by default in UI, but makes both: through Dataverse and direct uploads possible via API. + The existing :MaxFileUploadSizeInBytes property and ```dataverse.files..url-expiration-minutes``` jvm option for the same store also apply to direct upload. Direct upload via the Dataverse web interface is transparent to the user and handled automatically by the browser. Some minor differences in file upload exist: directly uploaded files are not unzipped and Dataverse does not scan their content to help in assigning a MIME type. 
Ingest of tabular files and metadata extraction from FITS files will occur, but can be turned off for files above a specified size limit through the new dataverse.files..ingestsizelimit jvm option. @@ -127,6 +133,7 @@ We made changes to the JSON Export in this release (Issue 6650, PR #6669). If yo - The JVM option dataverse.files.file.directory= controls where temporary files are stored (in the /temp subdir of the defined directory), independent of the location of any 'file' store defined above. - The JVM option dataverse.files..upload-redirect enables direct upload of files added to a dataset to the S3 bucket. (S3 stores only!) +- The JVM option dataverse.files..api-direct-upload enables direct upload of files added to a dataset to any storage. (Through API only and when the uploading tool has direct access to the storage used; i.e., uplad the file first and register it through API!) - The JVM option dataverse.files..MaxFileUploadSizeInBytes controls the maximum size of file uploads allowed for the given file store. - The JVM option dataverse.files..ingestsizelimit controls the maximum size of files for which ingest will be attempted, for the given file store. diff --git a/doc/sphinx-guides/source/installation/config.rst b/doc/sphinx-guides/source/installation/config.rst index f2de9d5702f..2b605ae8945 100644 --- a/doc/sphinx-guides/source/installation/config.rst +++ b/doc/sphinx-guides/source/installation/config.rst @@ -546,6 +546,7 @@ List of S3 Storage Options dataverse.files..bucket-name The bucket name. See above. (none) dataverse.files..download-redirect ``true``/``false`` Enable direct download or proxy through Dataverse. ``false`` dataverse.files..upload-redirect ``true``/``false`` Enable direct upload of files added to a dataset to the S3 store. ``false`` + dataverse.files..api-direct-upload ``true``/``false`` Enable direct upload of files added to a dataset through API only. 
``false`` dataverse.files..ingestsizelimit Maximum size of directupload files that should be ingested (none) dataverse.files..url-expiration-minutes If direct uploads/downloads: time until links expire. Optional. 60 dataverse.files..min-part-size Multipart direct uploads will occur for files larger than this. Optional. ``1024**3`` diff --git a/src/main/java/edu/harvard/iq/dataverse/dataaccess/StorageIO.java b/src/main/java/edu/harvard/iq/dataverse/dataaccess/StorageIO.java index 90e4a54dbe8..8e2dd9fa961 100644 --- a/src/main/java/edu/harvard/iq/dataverse/dataaccess/StorageIO.java +++ b/src/main/java/edu/harvard/iq/dataverse/dataaccess/StorageIO.java @@ -605,7 +605,8 @@ public static String getDriverPrefix(String driverId) { } public static boolean isDirectUploadEnabled(String driverId) { - return Boolean.parseBoolean(System.getProperty("dataverse.files." + driverId + ".upload-redirect")); + return Boolean.parseBoolean(System.getProperty("dataverse.files." + driverId + ".upload-redirect")) || + Boolean.parseBoolean(System.getProperty("dataverse.files." + driverId + ".api-direct-upload")); } //Check that storageIdentifier is consistent with store's config diff --git a/src/main/java/edu/harvard/iq/dataverse/util/FileUtil.java b/src/main/java/edu/harvard/iq/dataverse/util/FileUtil.java index 339de904f9e..0a41da4f7dd 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/FileUtil.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/FileUtil.java @@ -1748,7 +1748,8 @@ public static boolean isPackageFile(DataFile dataFile) { public static S3AccessIO getS3AccessForDirectUpload(Dataset dataset) { String driverId = dataset.getEffectiveStorageDriverId(); - boolean directEnabled = Boolean.getBoolean("dataverse.files." + driverId + ".upload-redirect"); + boolean directEnabled = Boolean.parseBoolean(System.getProperty("dataverse.files." + driverId + ".upload-redirect")) || + Boolean.parseBoolean(System.getProperty("dataverse.files." 
+ driverId + ".api-direct-upload")); //Should only be requested when it is allowed, but we'll log a warning otherwise if(!directEnabled) { logger.warning("Direct upload not supported for files in this dataset: " + dataset.getId()); diff --git a/src/main/java/edu/harvard/iq/dataverse/util/SystemConfig.java b/src/main/java/edu/harvard/iq/dataverse/util/SystemConfig.java index 7abd0d02065..4553a71a1d2 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/SystemConfig.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/SystemConfig.java @@ -1112,7 +1112,8 @@ public boolean isDatafileValidationOnPublishEnabled() { } public boolean directUploadEnabled(DvObjectContainer container) { - return Boolean.getBoolean("dataverse.files." + container.getEffectiveStorageDriverId() + ".upload-redirect"); + // this method is used in UI only, therfore "dataverse.files." + driverId + ".api-direct-upload" is not used here + return Boolean.parseBoolean(System.getProperty("dataverse.files." + container.getEffectiveStorageDriverId() + ".upload-redirect")); } public String getDataCiteRestApiUrlString() { From 5db560e999454a0b974215c6d9bc8373d4595fc0 Mon Sep 17 00:00:00 2001 From: Eryk Kulikowski Date: Thu, 29 Sep 2022 13:43:33 +0200 Subject: [PATCH 002/396] improvements in the documentation --- doc/release-notes/4.20-release-notes.md | 8 ++++---- doc/sphinx-guides/source/installation/config.rst | 4 ++-- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/doc/release-notes/4.20-release-notes.md b/doc/release-notes/4.20-release-notes.md index 8044047134f..79037d8cd8c 100644 --- a/doc/release-notes/4.20-release-notes.md +++ b/doc/release-notes/4.20-release-notes.md @@ -94,11 +94,11 @@ Direct upload to S3 in UI and API is enabled per store by one new jvm option: ./asadmin create-jvm-options "\-Ddataverse.files..upload-redirect=true" -That option makes direct upload by default in UI, in the API you can use either: direct or through Dataverse upload. 
Direct upload to S3 in API only is enabled per store by this new jvm option: +This option makes direct upload the default in the UI. In the API, you can use either: direct upload or upload via Dataverse upload. Direct upload to S3 in API only is enabled per store by this new jvm option: ./asadmin create-jvm-options "\-Ddataverse.files..api-direct-upload=true" -That option leaves through Dataverse upload by default in UI, but makes both: through Dataverse and direct uploads possible via API. +That option leaves via Dataverse upload by default in UI, but makes both: uploads via Dataverse and direct uploads possible via API. The existing :MaxFileUploadSizeInBytes property and ```dataverse.files..url-expiration-minutes``` jvm option for the same store also apply to direct upload. @@ -132,8 +132,8 @@ We made changes to the JSON Export in this release (Issue 6650, PR #6669). If yo ## New JVM Options for file storage drivers - The JVM option dataverse.files.file.directory= controls where temporary files are stored (in the /temp subdir of the defined directory), independent of the location of any 'file' store defined above. -- The JVM option dataverse.files..upload-redirect enables direct upload of files added to a dataset to the S3 bucket. (S3 stores only!) -- The JVM option dataverse.files..api-direct-upload enables direct upload of files added to a dataset to any storage. (Through API only and when the uploading tool has direct access to the storage used; i.e., uplad the file first and register it through API!) +- The JVM option dataverse.files..upload-redirect enables direct upload of files added to a dataset in the S3 bucket. (S3 stores only!) +- The JVM option dataverse.files..api-direct-upload enables direct upload of files added to a dataset in any storage. (Via API only and when the uploading tool has direct access to the relevant storage used; i.e., upload the file first and register it via API!) 
- The JVM option dataverse.files..MaxFileUploadSizeInBytes controls the maximum size of file uploads allowed for the given file store. - The JVM option dataverse.files..ingestsizelimit controls the maximum size of files for which ingest will be attempted, for the given file store. diff --git a/doc/sphinx-guides/source/installation/config.rst b/doc/sphinx-guides/source/installation/config.rst index 2b605ae8945..3245aeccfaf 100644 --- a/doc/sphinx-guides/source/installation/config.rst +++ b/doc/sphinx-guides/source/installation/config.rst @@ -545,8 +545,8 @@ List of S3 Storage Options dataverse.files..label **Required** label to be shown in the UI for this storage (none) dataverse.files..bucket-name The bucket name. See above. (none) dataverse.files..download-redirect ``true``/``false`` Enable direct download or proxy through Dataverse. ``false`` - dataverse.files..upload-redirect ``true``/``false`` Enable direct upload of files added to a dataset to the S3 store. ``false`` - dataverse.files..api-direct-upload ``true``/``false`` Enable direct upload of files added to a dataset through API only. ``false`` + dataverse.files..upload-redirect ``true``/``false`` Enable direct upload of files added to a dataset in the S3 store. ``false`` + dataverse.files..api-direct-upload ``true``/``false`` Enable direct upload of files added to a dataset via API only. ``false`` dataverse.files..ingestsizelimit Maximum size of directupload files that should be ingested (none) dataverse.files..url-expiration-minutes If direct uploads/downloads: time until links expire. Optional. 60 dataverse.files..min-part-size Multipart direct uploads will occur for files larger than this. Optional. 
``1024**3`` From cbc42d5052f8a9afc30121082a44c128387e2023 Mon Sep 17 00:00:00 2001 From: Eryk Kulikowski Date: Tue, 8 Nov 2022 14:07:32 +0100 Subject: [PATCH 003/396] renamed and moved the direct upload JVM option in the documentation --- doc/release-notes/4.20-release-notes.md | 7 +------ doc/sphinx-guides/source/installation/config.rst | 7 ++++++- .../edu/harvard/iq/dataverse/dataaccess/StorageIO.java | 2 +- src/main/java/edu/harvard/iq/dataverse/util/FileUtil.java | 2 +- .../java/edu/harvard/iq/dataverse/util/SystemConfig.java | 2 +- 5 files changed, 10 insertions(+), 10 deletions(-) diff --git a/doc/release-notes/4.20-release-notes.md b/doc/release-notes/4.20-release-notes.md index 79037d8cd8c..ec52b638274 100644 --- a/doc/release-notes/4.20-release-notes.md +++ b/doc/release-notes/4.20-release-notes.md @@ -90,14 +90,10 @@ Also note that the :MaxFileUploadSizeInBytes property has a new option to provid ### Direct S3 Upload Changes -Direct upload to S3 in UI and API is enabled per store by one new jvm option: +Direct upload to S3 is enabled per store by one new jvm option: ./asadmin create-jvm-options "\-Ddataverse.files..upload-redirect=true" -This option makes direct upload the default in the UI. In the API, you can use either: direct upload or upload via Dataverse upload. Direct upload to S3 in API only is enabled per store by this new jvm option: - - ./asadmin create-jvm-options "\-Ddataverse.files..api-direct-upload=true" - That option leaves via Dataverse upload by default in UI, but makes both: uploads via Dataverse and direct uploads possible via API. The existing :MaxFileUploadSizeInBytes property and ```dataverse.files..url-expiration-minutes``` jvm option for the same store also apply to direct upload. @@ -133,7 +129,6 @@ We made changes to the JSON Export in this release (Issue 6650, PR #6669). 
If yo - The JVM option dataverse.files.file.directory= controls where temporary files are stored (in the /temp subdir of the defined directory), independent of the location of any 'file' store defined above. - The JVM option dataverse.files..upload-redirect enables direct upload of files added to a dataset in the S3 bucket. (S3 stores only!) -- The JVM option dataverse.files..api-direct-upload enables direct upload of files added to a dataset in any storage. (Via API only and when the uploading tool has direct access to the relevant storage used; i.e., upload the file first and register it via API!) - The JVM option dataverse.files..MaxFileUploadSizeInBytes controls the maximum size of file uploads allowed for the given file store. - The JVM option dataverse.files..ingestsizelimit controls the maximum size of files for which ingest will be attempted, for the given file store. diff --git a/doc/sphinx-guides/source/installation/config.rst b/doc/sphinx-guides/source/installation/config.rst index 2e68bfaa1ab..4f15ad81190 100644 --- a/doc/sphinx-guides/source/installation/config.rst +++ b/doc/sphinx-guides/source/installation/config.rst @@ -249,6 +249,12 @@ A Dataverse installation can alternately store files in a Swift or S3-compatible A Dataverse installation may also be configured to reference some files (e.g. large and/or sensitive data) stored in a web-accessible trusted remote store. +When using integration tools, dataverse installation can be configured to allow out of band upload by setting the ``dataverse.files.\.allow-out-of-band-upload`` JVM option to ``true``. +Files can be then uploaded by an integration tool with ``datasets/{id}/add`` api call, or uploaded directly to the storage and registerd in a dataset afterwards using the ``datasets/{id}/addFiles`` api call. 
+Notice that using S3-storage with ``dataverse.files.\.upload-redirect`` JVM option enabled supersedes the ``allow-out-of-band-upload`` and will enable direct upload even with ``allow-out-of-band-upload`` not set (or set to false). +In other words, ``dataverse.files.\.allow-out-of-band-upload`` option opens the ``datasets/{id}/add`` and ``datasets/{id}/addFiles`` api endpoints without redirecting uploads in the UI. +Enabling the ``upload-redirect`` option allows then direct upload automatically, without the need of enabling the ``allow-out-of-band-upload`` (setting it to ``false`` does not have any effect in that case). + The following sections describe how to set up various types of stores and how to configure for multiple stores. Multi-store Basics @@ -546,7 +552,6 @@ List of S3 Storage Options dataverse.files..bucket-name The bucket name. See above. (none) dataverse.files..download-redirect ``true``/``false`` Enable direct download or proxy through Dataverse. ``false`` dataverse.files..upload-redirect ``true``/``false`` Enable direct upload of files added to a dataset in the S3 store. ``false`` - dataverse.files..api-direct-upload ``true``/``false`` Enable direct upload of files added to a dataset via API only. ``false`` dataverse.files..ingestsizelimit Maximum size of directupload files that should be ingested (none) dataverse.files..url-expiration-minutes If direct uploads/downloads: time until links expire. Optional. 60 dataverse.files..min-part-size Multipart direct uploads will occur for files larger than this. Optional. 
``1024**3`` diff --git a/src/main/java/edu/harvard/iq/dataverse/dataaccess/StorageIO.java b/src/main/java/edu/harvard/iq/dataverse/dataaccess/StorageIO.java index 8e2dd9fa961..a2ff546ef0a 100644 --- a/src/main/java/edu/harvard/iq/dataverse/dataaccess/StorageIO.java +++ b/src/main/java/edu/harvard/iq/dataverse/dataaccess/StorageIO.java @@ -606,7 +606,7 @@ public static String getDriverPrefix(String driverId) { public static boolean isDirectUploadEnabled(String driverId) { return Boolean.parseBoolean(System.getProperty("dataverse.files." + driverId + ".upload-redirect")) || - Boolean.parseBoolean(System.getProperty("dataverse.files." + driverId + ".api-direct-upload")); + Boolean.parseBoolean(System.getProperty("dataverse.files." + driverId + ".allow-out-of-band-upload")); } //Check that storageIdentifier is consistent with store's config diff --git a/src/main/java/edu/harvard/iq/dataverse/util/FileUtil.java b/src/main/java/edu/harvard/iq/dataverse/util/FileUtil.java index 0a41da4f7dd..db82df72b8a 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/FileUtil.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/FileUtil.java @@ -1749,7 +1749,7 @@ public static boolean isPackageFile(DataFile dataFile) { public static S3AccessIO getS3AccessForDirectUpload(Dataset dataset) { String driverId = dataset.getEffectiveStorageDriverId(); boolean directEnabled = Boolean.parseBoolean(System.getProperty("dataverse.files." + driverId + ".upload-redirect")) || - Boolean.parseBoolean(System.getProperty("dataverse.files." + driverId + ".api-direct-upload")); + Boolean.parseBoolean(System.getProperty("dataverse.files." 
+ driverId + ".allow-out-of-band-upload")); //Should only be requested when it is allowed, but we'll log a warning otherwise if(!directEnabled) { logger.warning("Direct upload not supported for files in this dataset: " + dataset.getId()); diff --git a/src/main/java/edu/harvard/iq/dataverse/util/SystemConfig.java b/src/main/java/edu/harvard/iq/dataverse/util/SystemConfig.java index 4585d99a01f..b040f557895 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/SystemConfig.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/SystemConfig.java @@ -1112,7 +1112,7 @@ public boolean isDatafileValidationOnPublishEnabled() { } public boolean directUploadEnabled(DvObjectContainer container) { - // this method is used in UI only, therfore "dataverse.files." + driverId + ".api-direct-upload" is not used here + // this method is used in UI only, therfore "dataverse.files." + driverId + ".allow-out-of-band-upload" is not used here return Boolean.parseBoolean(System.getProperty("dataverse.files." + container.getEffectiveStorageDriverId() + ".upload-redirect")); } From 4abac1ac15d77f2f059977254971cf4be0f3f1f1 Mon Sep 17 00:00:00 2001 From: Eryk Kulikowski Date: Thu, 17 Nov 2022 09:40:34 +0100 Subject: [PATCH 004/396] revert by accident editted old release notes --- doc/release-notes/4.20-release-notes.md | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/doc/release-notes/4.20-release-notes.md b/doc/release-notes/4.20-release-notes.md index ec52b638274..e29953db101 100644 --- a/doc/release-notes/4.20-release-notes.md +++ b/doc/release-notes/4.20-release-notes.md @@ -93,9 +93,7 @@ Also note that the :MaxFileUploadSizeInBytes property has a new option to provid Direct upload to S3 is enabled per store by one new jvm option: ./asadmin create-jvm-options "\-Ddataverse.files..upload-redirect=true" - -That option leaves via Dataverse upload by default in UI, but makes both: uploads via Dataverse and direct uploads possible via API. 
- + The existing :MaxFileUploadSizeInBytes property and ```dataverse.files..url-expiration-minutes``` jvm option for the same store also apply to direct upload. Direct upload via the Dataverse web interface is transparent to the user and handled automatically by the browser. Some minor differences in file upload exist: directly uploaded files are not unzipped and Dataverse does not scan their content to help in assigning a MIME type. Ingest of tabular files and metadata extraction from FITS files will occur, but can be turned off for files above a specified size limit through the new dataverse.files..ingestsizelimit jvm option. @@ -128,7 +126,7 @@ We made changes to the JSON Export in this release (Issue 6650, PR #6669). If yo ## New JVM Options for file storage drivers - The JVM option dataverse.files.file.directory= controls where temporary files are stored (in the /temp subdir of the defined directory), independent of the location of any 'file' store defined above. -- The JVM option dataverse.files..upload-redirect enables direct upload of files added to a dataset in the S3 bucket. (S3 stores only!) +- The JVM option dataverse.files..upload-redirect enables direct upload of files added to a dataset to the S3 bucket. (S3 stores only!) - The JVM option dataverse.files..MaxFileUploadSizeInBytes controls the maximum size of file uploads allowed for the given file store. - The JVM option dataverse.files..ingestsizelimit controls the maximum size of files for which ingest will be attempted, for the given file store. 
From 578c7af84e7cd1eac52901643d9bb49bc878cfa3 Mon Sep 17 00:00:00 2001 From: Eryk Kulikowski Date: Thu, 17 Nov 2022 10:05:20 +0100 Subject: [PATCH 005/396] indentation fixes --- .../iq/dataverse/util/SystemConfig.java | 407 +++++++++--------- 1 file changed, 205 insertions(+), 202 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/util/SystemConfig.java b/src/main/java/edu/harvard/iq/dataverse/util/SystemConfig.java index b040f557895..f3d8e46b004 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/SystemConfig.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/SystemConfig.java @@ -59,8 +59,8 @@ public class SystemConfig { @EJB AuthenticationServiceBean authenticationService; - - public static final String DATAVERSE_PATH = "/dataverse/"; + + public static final String DATAVERSE_PATH = "/dataverse/"; /** * A JVM option for the advertised fully qualified domain name (hostname) of @@ -70,11 +70,11 @@ public class SystemConfig { * The equivalent in DVN 3.x was "dvn.inetAddress". */ public static final String FQDN = "dataverse.fqdn"; - + /** * A JVM option for specifying the "official" URL of the site. - * Unlike the FQDN option above, this would be a complete URL, - * with the protocol, port number etc. + * Unlike the FQDN option above, this would be a complete URL, + * with the protocol, port number etc. */ public static final String SITE_URL = "dataverse.siteUrl"; @@ -102,41 +102,41 @@ public class SystemConfig { private String saneDefaultForSolrHostColonPort = "localhost:8983"; /** - * The default number of datafiles that we allow to be created through + * The default number of datafiles that we allow to be created through * zip file upload. 
*/ - private static final int defaultZipUploadFilesLimit = 1000; + private static final int defaultZipUploadFilesLimit = 1000; public static final long defaultZipDownloadLimit = 104857600L; // 100MB private static final int defaultMultipleUploadFilesLimit = 1000; private static final int defaultLoginSessionTimeout = 480; // = 8 hours - private static String appVersionString = null; - private static String buildNumberString = null; - + private static String appVersionString = null; + private static String buildNumberString = null; + private static final String JVM_TIMER_SERVER_OPTION = "dataverse.timerServer"; - - private static final long DEFAULT_GUESTBOOK_RESPONSES_DISPLAY_LIMIT = 5000L; + + private static final long DEFAULT_GUESTBOOK_RESPONSES_DISPLAY_LIMIT = 5000L; private static final long DEFAULT_THUMBNAIL_SIZE_LIMIT_IMAGE = 3000000L; // 3 MB private static final long DEFAULT_THUMBNAIL_SIZE_LIMIT_PDF = 1000000L; // 1 MB - + public final static String DEFAULTCURATIONLABELSET = "DEFAULT"; public final static String CURATIONLABELSDISABLED = "DISABLED"; - + public String getVersion() { return getVersion(false); } - + // The return value is a "prviate static String", that should be initialized - // once, on the first call (see the code below)... But this is a @Stateless - // bean... so that would mean "once per thread"? - this would be a prime + // once, on the first call (see the code below)... But this is a @Stateless + // bean... so that would mean "once per thread"? - this would be a prime // candidate for being moved into some kind of an application-scoped caching // service... some CachingService @Singleton - ? (L.A. 
5.8) public String getVersion(boolean withBuildNumber) { - + if (appVersionString == null) { // The Version Number is no longer supplied in a .properties file - so - // we can't just do + // we can't just do // return BundleUtil.getStringFromBundle("version.number", null, ResourceBundle.getBundle("VersionNumber", Locale.US)); // // Instead, we'll rely on Maven placing the version number into the @@ -144,31 +144,31 @@ public String getVersion(boolean withBuildNumber) { // (this is considered a better practice, and will also allow us // to maintain this number in only one place - the pom.xml file) // -- L.A. 4.0.2 - - // One would assume, that once the version is in the MANIFEST.MF, - // as Implementation-Version:, it would be possible to obtain - // said version simply as + + // One would assume, that once the version is in the MANIFEST.MF, + // as Implementation-Version:, it would be possible to obtain + // said version simply as // appVersionString = getClass().getPackage().getImplementationVersion(); - // alas - that's not working, for whatever reason. (perhaps that's + // alas - that's not working, for whatever reason. (perhaps that's // only how it works with jar-ed packages; not with .war files). - // People on the interwebs suggest that one should instead - // open the Manifest as a resource, then extract its attributes. - // There were some complications with that too. Plus, relying solely - // on the MANIFEST.MF would NOT work for those of the developers who - // are using "in place deployment" (i.e., where - // Netbeans runs their builds directly from the local target - // directory, bypassing the war file deployment; and the Manifest - // is only available in the .war file). For that reason, I am - // going to rely on the pom.properties file, and use java.util.Properties + // People on the interwebs suggest that one should instead + // open the Manifest as a resource, then extract its attributes. + // There were some complications with that too. 
Plus, relying solely + // on the MANIFEST.MF would NOT work for those of the developers who + // are using "in place deployment" (i.e., where + // Netbeans runs their builds directly from the local target + // directory, bypassing the war file deployment; and the Manifest + // is only available in the .war file). For that reason, I am + // going to rely on the pom.properties file, and use java.util.Properties // to read it. We have to look for this file in 2 different places - // depending on whether this is a .war file deployment, or a + // depending on whether this is a .war file deployment, or a // developers build. (the app-level META-INF is only populated when - // a .war file is built; the "maven-archiver" directory, on the other + // a .war file is built; the "maven-archiver" directory, on the other // hand, is only available when it's a local build deployment). - // So, long story short, I'm resorting to the convoluted steps below. - // It may look hacky, but it should actually be pretty solid and - // reliable. - + // So, long story short, I'm resorting to the convoluted steps below. + // It may look hacky, but it should actually be pretty solid and + // reliable. + // First, find the absolute path url of the application persistence file // always supplied with the Dataverse app: @@ -180,46 +180,46 @@ public String getVersion(boolean withBuildNumber) { filePath = fileUrl.getFile(); if (filePath != null) { InputStream mavenPropertiesInputStream = null; - String mavenPropertiesFilePath; + String mavenPropertiesFilePath; Properties mavenProperties = new Properties(); filePath = filePath.replaceFirst("/[^/]*$", "/"); - // Using a relative path, find the location of the maven pom.properties file. - // First, try to look for it in the app-level META-INF. This will only be - // available if it's a war file deployment: + // Using a relative path, find the location of the maven pom.properties file. + // First, try to look for it in the app-level META-INF. 
This will only be + // available if it's a war file deployment: mavenPropertiesFilePath = filePath.concat("../../../META-INF/maven/edu.harvard.iq/dataverse/pom.properties"); - + try { mavenPropertiesInputStream = new FileInputStream(mavenPropertiesFilePath); } catch (IOException ioex) { - // OK, let's hope this is a local dev. build. - // In that case the properties file should be available in - // the maven-archiver directory: - + // OK, let's hope this is a local dev. build. + // In that case the properties file should be available in + // the maven-archiver directory: + mavenPropertiesFilePath = filePath.concat("../../../../maven-archiver/pom.properties"); - - // try again: - + + // try again: + try { mavenPropertiesInputStream = new FileInputStream(mavenPropertiesFilePath); } catch (IOException ioex2) { logger.warning("Failed to find and/or open for reading the pom.properties file."); - mavenPropertiesInputStream = null; + mavenPropertiesInputStream = null; } } - + if (mavenPropertiesInputStream != null) { try { mavenProperties.load(mavenPropertiesInputStream); - appVersionString = mavenProperties.getProperty("version"); + appVersionString = mavenProperties.getProperty("version"); } catch (IOException ioex) { logger.warning("caught IOException trying to read and parse the pom properties file."); } finally { IOUtils.closeQuietly(mavenPropertiesInputStream); } } - + } else { logger.warning("Null file path representation of the location of persistence.xml in the webapp root directory!"); } @@ -229,53 +229,54 @@ public String getVersion(boolean withBuildNumber) { if (appVersionString == null) { - // still null? - defaulting to 4.0: + // still null? 
- defaulting to 4.0: appVersionString = "4.0"; } } - + if (withBuildNumber) { if (buildNumberString == null) { - // (build number is still in a .properties file in the source tree; it only - // contains a real build number if this war file was built by - // Jenkins) - + // (build number is still in a .properties file in the source tree; it only + // contains a real build number if this war file was built by + // Jenkins) + try { buildNumberString = ResourceBundle.getBundle("BuildNumber").getString("build.number"); } catch (MissingResourceException ex) { - buildNumberString = null; + buildNumberString = null; } } - + if (buildNumberString != null && !buildNumberString.equals("")) { - return appVersionString + " build " + buildNumberString; - } - } - - return appVersionString; + return appVersionString + " build " + buildNumberString; + } + } + + return appVersionString; } public String getSolrHostColonPort() { String SolrHost; if ( System.getenv("SOLR_SERVICE_HOST") != null && System.getenv("SOLR_SERVICE_HOST") != ""){ SolrHost = System.getenv("SOLR_SERVICE_HOST"); + } else { + SolrHost = saneDefaultForSolrHostColonPort; } - else SolrHost = saneDefaultForSolrHostColonPort; String solrHostColonPort = settingsService.getValueForKey(SettingsServiceBean.Key.SolrHostColonPort, SolrHost); return solrHostColonPort; } public boolean isProvCollectionEnabled() { String provCollectionEnabled = settingsService.getValueForKey(SettingsServiceBean.Key.ProvCollectionEnabled, null); - if("true".equalsIgnoreCase(provCollectionEnabled)){ + if ("true".equalsIgnoreCase(provCollectionEnabled)) { return true; } return false; } - + public int getMetricsCacheTimeoutMinutes() { - int defaultValue = 10080; //one week in minutes + int defaultValue = 10080; // one week in minutes SettingsServiceBean.Key key = SettingsServiceBean.Key.MetricsCacheTimeoutMinutes; String metricsCacheTimeString = settingsService.getValueForKey(key); if (metricsCacheTimeString != null) { @@ -293,7 +294,7 @@ public 
int getMetricsCacheTimeoutMinutes() { } return defaultValue; } - + public int getMinutesUntilConfirmEmailTokenExpires() { final int minutesInOneDay = 1440; final int reasonableDefault = minutesInOneDay; @@ -338,10 +339,10 @@ public static int getMinutesUntilPasswordResetTokenExpires() { } return reasonableDefault; } - + /** * The "official", designated URL of the site; - * can be defined as a complete URL; or derived from the + * can be defined as a complete URL; or derived from the * "official" hostname. If none of these options is set, * defaults to the InetAddress.getLocalHOst() and https; * These are legacy JVM options. Will be eventualy replaced @@ -350,7 +351,7 @@ public static int getMinutesUntilPasswordResetTokenExpires() { public String getDataverseSiteUrl() { return getDataverseSiteUrlStatic(); } - + public static String getDataverseSiteUrlStatic() { String hostUrl = System.getProperty(SITE_URL); if (hostUrl != null && !"".equals(hostUrl)) { @@ -367,19 +368,20 @@ public static String getDataverseSiteUrlStatic() { hostUrl = "https://" + hostName; return hostUrl; } - + /** - * URL Tracking: + * URL Tracking: */ public String getPageURLWithQueryString() { - return PrettyContext.getCurrentInstance().getRequestURL().toURL() + PrettyContext.getCurrentInstance().getRequestQueryString().toQueryString(); + return PrettyContext.getCurrentInstance().getRequestURL().toURL() + + PrettyContext.getCurrentInstance().getRequestQueryString().toQueryString(); } /** - * The "official" server's fully-qualified domain name: + * The "official" server's fully-qualified domain name: */ public String getDataverseServer() { - // still reliese on a JVM option: + // still reliese on a JVM option: String fqdn = System.getProperty(FQDN); if (fqdn == null) { try { @@ -447,44 +449,44 @@ public static int getIntLimitFromStringOrDefault(String limitSetting, Integer de /** * Download-as-zip size limit. 
- * returns defaultZipDownloadLimit if not specified; - * set to -1 to disable zip downloads. + * returns defaultZipDownloadLimit if not specified; + * set to -1 to disable zip downloads. */ public long getZipDownloadLimit() { String zipLimitOption = settingsService.getValueForKey(SettingsServiceBean.Key.ZipDownloadLimit); return getLongLimitFromStringOrDefault(zipLimitOption, defaultZipDownloadLimit); } - + public int getZipUploadFilesLimit() { String limitOption = settingsService.getValueForKey(SettingsServiceBean.Key.ZipUploadFilesLimit); return getIntLimitFromStringOrDefault(limitOption, defaultZipUploadFilesLimit); } - + /** - * Session timeout, in minutes. + * Session timeout, in minutes. * (default value provided) */ public int getLoginSessionTimeout() { return getIntLimitFromStringOrDefault( - settingsService.getValueForKey(SettingsServiceBean.Key.LoginSessionTimeout), - defaultLoginSessionTimeout); + settingsService.getValueForKey(SettingsServiceBean.Key.LoginSessionTimeout), + defaultLoginSessionTimeout); } - + /* ` the number of files the GUI user is allowed to upload in one batch, via drag-and-drop, or through the file select dialog - */ + */ public int getMultipleUploadFilesLimit() { String limitOption = settingsService.getValueForKey(SettingsServiceBean.Key.MultipleUploadFilesLimit); return getIntLimitFromStringOrDefault(limitOption, defaultMultipleUploadFilesLimit); } - + public long getGuestbookResponsesPageDisplayLimit() { String limitSetting = settingsService.getValueForKey(SettingsServiceBean.Key.GuestbookResponsesPageDisplayLimit); return getLongLimitFromStringOrDefault(limitSetting, DEFAULT_GUESTBOOK_RESPONSES_DISPLAY_LIMIT); } - - public long getUploadLogoSizeLimit(){ + + public long getUploadLogoSizeLimit() { return 500000; } @@ -497,10 +499,10 @@ public long getThumbnailSizeLimitPDF() { } public static long getThumbnailSizeLimit(String type) { - String option = null; - - //get options via jvm options - + String option = null; + + // get 
options via jvm options + if ("Image".equals(type)) { option = System.getProperty("dataverse.dataAccess.thumbnail.image.limit"); return getLongLimitFromStringOrDefault(option, DEFAULT_THUMBNAIL_SIZE_LIMIT_IMAGE); @@ -512,19 +514,19 @@ public static long getThumbnailSizeLimit(String type) { // Zero (0) means no limit. return getLongLimitFromStringOrDefault(option, 0L); } - + public boolean isThumbnailGenerationDisabledForType(String type) { return getThumbnailSizeLimit(type) == -1l; } - + public boolean isThumbnailGenerationDisabledForImages() { return isThumbnailGenerationDisabledForType("Image"); } - + public boolean isThumbnailGenerationDisabledForPDF() { return isThumbnailGenerationDisabledForType("PDF"); } - + public String getApplicationTermsOfUse() { String language = BundleUtil.getCurrentLocale().getLanguage(); String saneDefaultForAppTermsOfUse = BundleUtil.getStringFromBundle("system.app.terms"); @@ -532,9 +534,9 @@ public String getApplicationTermsOfUse() { // value, or as a better default than the saneDefaultForAppTermsOfUse if there // is no language-specific value String appTermsOfUse = settingsService.getValueForKey(SettingsServiceBean.Key.ApplicationTermsOfUse, saneDefaultForAppTermsOfUse); - //Now get the language-specific value if it exists + // Now get the language-specific value if it exists if (language != null && !language.equalsIgnoreCase(BundleUtil.getDefaultLocale().getLanguage())) { - appTermsOfUse = settingsService.getValueForKey(SettingsServiceBean.Key.ApplicationTermsOfUse, language, appTermsOfUse); + appTermsOfUse = settingsService.getValueForKey(SettingsServiceBean.Key.ApplicationTermsOfUse, language, appTermsOfUse); } return appTermsOfUse; } @@ -545,7 +547,7 @@ public String getApiTermsOfUse() { return apiTermsOfUse; } - // TODO: + // TODO: // remove this method! // pages should be using settingsWrapper.get(":ApplicationPrivacyPolicyUrl") instead. 
-- 4.2.1 public String getApplicationPrivacyPolicyUrl() { @@ -564,10 +566,10 @@ public boolean isFilesOnDatasetPageFromSolr() { return settingsService.isTrueForKey(SettingsServiceBean.Key.FilesOnDatasetPageFromSolr, safeDefaultIfKeyNotFound); } - public Long getMaxFileUploadSizeForStore(String driverId){ - return settingsService.getValueForCompoundKeyAsLong(SettingsServiceBean.Key.MaxFileUploadSizeInBytes, driverId); - } - + public Long getMaxFileUploadSizeForStore(String driverId) { + return settingsService.getValueForCompoundKeyAsLong(SettingsServiceBean.Key.MaxFileUploadSizeInBytes, driverId); + } + public Integer getSearchHighlightFragmentSize() { String fragSize = settingsService.getValueForKey(SettingsServiceBean.Key.SearchHighlightFragmentSize); if (fragSize != null) { @@ -581,12 +583,12 @@ public Integer getSearchHighlightFragmentSize() { } public long getTabularIngestSizeLimit() { - // This method will return the blanket ingestable size limit, if - // set on the system. I.e., the universal limit that applies to all - // tabular ingests, regardless of fromat: - - String limitEntry = settingsService.getValueForKey(SettingsServiceBean.Key.TabularIngestSizeLimit); - + // This method will return the blanket ingestable size limit, if + // set on the system. I.e., the universal limit that applies to all + // tabular ingests, regardless of fromat: + + String limitEntry = settingsService.getValueForKey(SettingsServiceBean.Key.TabularIngestSizeLimit); + if (limitEntry != null) { try { Long sizeOption = new Long(limitEntry); @@ -595,48 +597,48 @@ public long getTabularIngestSizeLimit() { logger.warning("Invalid value for TabularIngestSizeLimit option? - " + limitEntry); } } - // -1 means no limit is set; - // 0 on the other hand would mean that ingest is fully disabled for - // tabular data. - return -1; + // -1 means no limit is set; + // 0 on the other hand would mean that ingest is fully disabled for + // tabular data. 
+ return -1; } - + public long getTabularIngestSizeLimit(String formatName) { // This method returns the size limit set specifically for this format name, - // if available, otherwise - the blanket limit that applies to all tabular - // ingests regardless of a format. - + // if available, otherwise - the blanket limit that applies to all tabular + // ingests regardless of a format. + if (formatName == null || formatName.equals("")) { - return getTabularIngestSizeLimit(); + return getTabularIngestSizeLimit(); } - + String limitEntry = settingsService.get(SettingsServiceBean.Key.TabularIngestSizeLimit.toString() + ":" + formatName); - + if (limitEntry != null) { try { Long sizeOption = new Long(limitEntry); return sizeOption; } catch (NumberFormatException nfe) { - logger.warning("Invalid value for TabularIngestSizeLimit:" + formatName + "? - " + limitEntry ); + logger.warning("Invalid value for TabularIngestSizeLimit:" + formatName + "? - " + limitEntry); } } - - return getTabularIngestSizeLimit(); + + return getTabularIngestSizeLimit(); } public boolean isOAIServerEnabled() { boolean defaultResponse = false; return settingsService.isTrueForKey(SettingsServiceBean.Key.OAIServerEnabled, defaultResponse); } - + public void enableOAIServer() { settingsService.setValueForKey(SettingsServiceBean.Key.OAIServerEnabled, "true"); } - + public void disableOAIServer() { settingsService.deleteValueForKey(SettingsServiceBean.Key.OAIServerEnabled); - } - + } + public boolean isTimerServer() { String optionValue = System.getProperty(JVM_TIMER_SERVER_OPTION); if ("true".equalsIgnoreCase(optionValue)) { @@ -704,11 +706,12 @@ public String getOAuth2CallbackUrl() { } return saneDefault; } - + public boolean isShibPassiveLoginEnabled() { boolean defaultResponse = false; return settingsService.isTrueForKey(SettingsServiceBean.Key.ShibPassiveLoginEnabled, defaultResponse); } + public boolean isShibAttributeCharacterSetConversionEnabled() { boolean defaultResponse = true; return 
settingsService.isTrueForKey(SettingsServiceBean.Key.ShibAttributeCharacterSetConversionEnabled, defaultResponse); @@ -734,7 +737,7 @@ public String getPVDictionaries() { public int getPVGoodStrength() { // FIXME: Change this to 21 to match Harvard's requirements or implement a way to disable the rule (0 or -1) and have the default be disabled. int goodStrengthLength = 20; - //String _goodStrengthLength = System.getProperty("pv.goodstrength", settingsService.get(SettingsServiceBean.Key.PVGoodStrength.toString())); + // String _goodStrengthLength = System.getProperty("pv.goodstrength", settingsService.get(SettingsServiceBean.Key.PVGoodStrength.toString())); String _goodStrengthLength = settingsService.get(SettingsServiceBean.Key.PVGoodStrength.toString()); try { goodStrengthLength = Integer.parseInt(_goodStrengthLength); @@ -862,9 +865,7 @@ public enum FileUploadMethods { * Upload through Globus of large files */ - GLOBUS("globus") - ; - + GLOBUS("globus"); private final String text; @@ -887,8 +888,7 @@ public static FileUploadMethods fromString(String text) { public String toString() { return text; } - - + } /** @@ -904,8 +904,8 @@ public enum FileDownloadMethods { */ RSYNC("rsal/rsync"), NATIVE("native/http"), - GLOBUS("globus") - ; + GLOBUS("globus"); + private final String text; private FileDownloadMethods(final String text) { @@ -927,27 +927,28 @@ public static FileUploadMethods fromString(String text) { public String toString() { return text; } - + } - + public enum DataFilePIDFormat { DEPENDENT("DEPENDENT"), INDEPENDENT("INDEPENDENT"); + private final String text; public String getText() { return text; } - - private DataFilePIDFormat(final String text){ + + private DataFilePIDFormat(final String text) { this.text = text; } - + @Override public String toString() { return text; } - + } /** @@ -987,44 +988,44 @@ public String toString() { } - public boolean isPublicInstall(){ + public boolean isPublicInstall() { boolean saneDefault = false; return 
settingsService.isTrueForKey(SettingsServiceBean.Key.PublicInstall, saneDefault); } - - public boolean isRsyncUpload(){ + + public boolean isRsyncUpload() { return getMethodAvailable(SystemConfig.FileUploadMethods.RSYNC.toString(), true); } - public boolean isGlobusUpload(){ + public boolean isGlobusUpload() { return getMethodAvailable(FileUploadMethods.GLOBUS.toString(), true); } // Controls if HTTP upload is enabled for both GUI and API. - public boolean isHTTPUpload(){ + public boolean isHTTPUpload() { return getMethodAvailable(SystemConfig.FileUploadMethods.NATIVE.toString(), true); } - - public boolean isRsyncOnly(){ + + public boolean isRsyncOnly() { String downloadMethods = settingsService.getValueForKey(SettingsServiceBean.Key.DownloadMethods); - if(downloadMethods == null){ + if (downloadMethods == null) { return false; } - if (!downloadMethods.toLowerCase().equals(SystemConfig.FileDownloadMethods.RSYNC.toString())){ + if (!downloadMethods.toLowerCase().equals(SystemConfig.FileDownloadMethods.RSYNC.toString())) { return false; } String uploadMethods = settingsService.getValueForKey(SettingsServiceBean.Key.UploadMethods); - if (uploadMethods==null){ + if (uploadMethods == null) { return false; } else { - return Arrays.asList(uploadMethods.toLowerCase().split("\\s*,\\s*")).size() == 1 && uploadMethods.toLowerCase().equals(SystemConfig.FileUploadMethods.RSYNC.toString()); + return Arrays.asList(uploadMethods.toLowerCase().split("\\s*,\\s*")).size() == 1 && uploadMethods.toLowerCase().equals(SystemConfig.FileUploadMethods.RSYNC.toString()); } } - + public boolean isRsyncDownload() { return getMethodAvailable(SystemConfig.FileUploadMethods.RSYNC.toString(), false); } - + public boolean isHTTPDownload() { return getMethodAvailable(SystemConfig.FileUploadMethods.NATIVE.toString(), false); } @@ -1032,14 +1033,14 @@ public boolean isHTTPDownload() { public boolean isGlobusDownload() { return getMethodAvailable(FileUploadMethods.GLOBUS.toString(), false); } - + 
public boolean isGlobusFileDownload() { return (isGlobusDownload() && settingsService.isTrueForKey(SettingsServiceBean.Key.GlobusSingleFileTransfer, false)); } public List getGlobusStoresList() { - String globusStores = settingsService.getValueForKey(SettingsServiceBean.Key.GlobusStores, ""); - return Arrays.asList(globusStores.split("\\s*,\\s*")); + String globusStores = settingsService.getValueForKey(SettingsServiceBean.Key.GlobusStores, ""); + return Arrays.asList(globusStores.split("\\s*,\\s*")); } private Boolean getMethodAvailable(String method, boolean upload) { @@ -1051,31 +1052,32 @@ private Boolean getMethodAvailable(String method, boolean upload) { return Arrays.asList(methods.toLowerCase().split("\\s*,\\s*")).contains(method); } } - - public Integer getUploadMethodCount(){ - String uploadMethods = settingsService.getValueForKey(SettingsServiceBean.Key.UploadMethods); - if (uploadMethods==null){ + + public Integer getUploadMethodCount() { + String uploadMethods = settingsService.getValueForKey(SettingsServiceBean.Key.UploadMethods); + if (uploadMethods == null) { return 0; } else { - return Arrays.asList(uploadMethods.toLowerCase().split("\\s*,\\s*")).size(); - } + return Arrays.asList(uploadMethods.toLowerCase().split("\\s*,\\s*")).size(); + } } - public boolean isDataFilePIDSequentialDependent(){ + + public boolean isDataFilePIDSequentialDependent() { String doiIdentifierType = settingsService.getValueForKey(SettingsServiceBean.Key.IdentifierGenerationStyle, "randomString"); String doiDataFileFormat = settingsService.getValueForKey(SettingsServiceBean.Key.DataFilePIDFormat, "DEPENDENT"); - if (doiIdentifierType.equals("storedProcGenerated") && doiDataFileFormat.equals("DEPENDENT")){ + if (doiIdentifierType.equals("storedProcGenerated") && doiDataFileFormat.equals("DEPENDENT")) { return true; } return false; } - + public int getPIDAsynchRegFileCount() { String fileCount = settingsService.getValueForKey(SettingsServiceBean.Key.PIDAsynchRegFileCount, 
"10"); int retVal = 10; try { retVal = Integer.parseInt(fileCount); - } catch (NumberFormatException e) { - //if no number in the setting we'll return 10 + } catch (NumberFormatException e) { + // if no number in the setting we'll return 10 } return retVal; } @@ -1089,13 +1091,13 @@ public boolean isFilePIDsEnabled() { boolean safeDefaultIfKeyNotFound = true; return settingsService.isTrueForKey(SettingsServiceBean.Key.FilePIDsEnabled, safeDefaultIfKeyNotFound); } - + public boolean isIndependentHandleService() { boolean safeDefaultIfKeyNotFound = false; return settingsService.isTrueForKey(SettingsServiceBean.Key.IndependentHandleService, safeDefaultIfKeyNotFound); - + } - + public String getHandleAuthHandle() { String handleAuthHandle = settingsService.getValueForKey(SettingsServiceBean.Key.HandleAuthHandle, null); return handleAuthHandle; @@ -1105,61 +1107,61 @@ public String getMDCLogPath() { String mDCLogPath = settingsService.getValueForKey(SettingsServiceBean.Key.MDCLogPath, null); return mDCLogPath; } - + public boolean isDatafileValidationOnPublishEnabled() { boolean safeDefaultIfKeyNotFound = true; return settingsService.isTrueForKey(SettingsServiceBean.Key.FileValidationOnPublishEnabled, safeDefaultIfKeyNotFound); } - public boolean directUploadEnabled(DvObjectContainer container) { + public boolean directUploadEnabled(DvObjectContainer container) { // this method is used in UI only, therfore "dataverse.files." + driverId + ".allow-out-of-band-upload" is not used here return Boolean.parseBoolean(System.getProperty("dataverse.files." + container.getEffectiveStorageDriverId() + ".upload-redirect")); - } - - public String getDataCiteRestApiUrlString() { - //As of 5.0 the 'doi.dataciterestapiurlstring' is the documented jvm option. Prior versions used 'doi.mdcbaseurlstring' or were hardcoded to api.datacite.org, so the defaults are for backward compatibility. 
+ } + + public String getDataCiteRestApiUrlString() { + // As of 5.0 the 'doi.dataciterestapiurlstring' is the documented jvm option. Prior versions used 'doi.mdcbaseurlstring' or were hardcoded to api.datacite.org, so the defaults are for backward compatibility. return System.getProperty("doi.dataciterestapiurlstring", System.getProperty("doi.mdcbaseurlstring", "https://api.datacite.org")); - } - + } + public boolean isExternalDataverseValidationEnabled() { return settingsService.getValueForKey(SettingsServiceBean.Key.DataverseMetadataValidatorScript) != null; - // alternatively, we can also check if the script specified exists, + // alternatively, we can also check if the script specified exists, // and is executable. -- ? } - + public boolean isExternalDatasetValidationEnabled() { return settingsService.getValueForKey(SettingsServiceBean.Key.DatasetMetadataValidatorScript) != null; - // alternatively, we can also check if the script specified exists, + // alternatively, we can also check if the script specified exists, // and is executable. -- ? 
} - + public String getDataverseValidationExecutable() { return settingsService.getValueForKey(SettingsServiceBean.Key.DataverseMetadataValidatorScript); } - + public String getDatasetValidationExecutable() { return settingsService.getValueForKey(SettingsServiceBean.Key.DatasetMetadataValidatorScript); } - + public String getDataverseValidationFailureMsg() { String defaultMessage = "This dataverse collection cannot be published because it has failed an external metadata validation test."; return settingsService.getValueForKey(SettingsServiceBean.Key.DataverseMetadataPublishValidationFailureMsg, defaultMessage); } - + public String getDataverseUpdateValidationFailureMsg() { String defaultMessage = "This dataverse collection cannot be updated because it has failed an external metadata validation test."; return settingsService.getValueForKey(SettingsServiceBean.Key.DataverseMetadataUpdateValidationFailureMsg, defaultMessage); } - + public String getDatasetValidationFailureMsg() { String defaultMessage = "This dataset cannot be published because it has failed an external metadata validation test."; return settingsService.getValueForKey(SettingsServiceBean.Key.DatasetMetadataValidationFailureMsg, defaultMessage); } - + public boolean isExternalValidationAdminOverrideEnabled() { return "true".equalsIgnoreCase(settingsService.getValueForKey(SettingsServiceBean.Key.ExternalValidationAdminOverride)); } - + public long getDatasetValidationSizeLimit() { String limitEntry = settingsService.getValueForKey(SettingsServiceBean.Key.DatasetChecksumValidationSizeLimit); @@ -1189,6 +1191,7 @@ public long getFileValidationSizeLimit() { // -1 means no limit is set; return -1; } + public Map getCurationLabels() { Map labelMap = new HashMap(); String setting = settingsService.getValueForKey(SettingsServiceBean.Key.AllowedCurationLabels, ""); @@ -1229,15 +1232,15 @@ public Map getCurationLabels() { } return labelMap; } - + public boolean isSignupDisabledForRemoteAuthProvider(String 
providerId) { - Boolean ret = settingsService.getValueForCompoundKeyAsBoolean(SettingsServiceBean.Key.AllowRemoteAuthSignUp, providerId); - - // we default to false - i.e., "not disabled" if the setting is not present: + Boolean ret = settingsService.getValueForCompoundKeyAsBoolean(SettingsServiceBean.Key.AllowRemoteAuthSignUp, providerId); + + // we default to false - i.e., "not disabled" if the setting is not present: if (ret == null) { - return false; + return false; } - - return !ret; + + return !ret; } } From 8578de173b63dbde3bb5440147422783621fbee9 Mon Sep 17 00:00:00 2001 From: Eryk Kulikowski Date: Thu, 17 Nov 2022 10:10:57 +0100 Subject: [PATCH 006/396] tab character removed --- src/main/java/edu/harvard/iq/dataverse/util/SystemConfig.java | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/util/SystemConfig.java b/src/main/java/edu/harvard/iq/dataverse/util/SystemConfig.java index f3d8e46b004..1edf5a0fb6e 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/SystemConfig.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/SystemConfig.java @@ -1105,7 +1105,7 @@ public String getHandleAuthHandle() { public String getMDCLogPath() { String mDCLogPath = settingsService.getValueForKey(SettingsServiceBean.Key.MDCLogPath, null); - return mDCLogPath; + return mDCLogPath;this method is used } public boolean isDatafileValidationOnPublishEnabled() { @@ -1115,7 +1115,7 @@ public boolean isDatafileValidationOnPublishEnabled() { public boolean directUploadEnabled(DvObjectContainer container) { // this method is used in UI only, therfore "dataverse.files." + driverId + ".allow-out-of-band-upload" is not used here - return Boolean.parseBoolean(System.getProperty("dataverse.files." + container.getEffectiveStorageDriverId() + ".upload-redirect")); + return Boolean.parseBoolean(System.getProperty("dataverse.files." 
+ container.getEffectiveStorageDriverId() + ".upload-redirect")); } public String getDataCiteRestApiUrlString() { From f2e75db13bcff1f5a5bc7d5cfc958db04be745c0 Mon Sep 17 00:00:00 2001 From: Eryk Kulikowski Date: Thu, 17 Nov 2022 10:15:03 +0100 Subject: [PATCH 007/396] tab character removed --- src/main/java/edu/harvard/iq/dataverse/util/SystemConfig.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/util/SystemConfig.java b/src/main/java/edu/harvard/iq/dataverse/util/SystemConfig.java index 1edf5a0fb6e..0ab99c0de6a 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/SystemConfig.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/SystemConfig.java @@ -1105,7 +1105,7 @@ public String getHandleAuthHandle() { public String getMDCLogPath() { String mDCLogPath = settingsService.getValueForKey(SettingsServiceBean.Key.MDCLogPath, null); - return mDCLogPath;this method is used + return mDCLogPath; } public boolean isDatafileValidationOnPublishEnabled() { From bff889d3864ca10f7dc4f7ae84595e40a2b70d34 Mon Sep 17 00:00:00 2001 From: Eryk Kulikowski Date: Thu, 17 Nov 2022 10:25:39 +0100 Subject: [PATCH 008/396] tab character removed --- src/main/java/edu/harvard/iq/dataverse/util/SystemConfig.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/util/SystemConfig.java b/src/main/java/edu/harvard/iq/dataverse/util/SystemConfig.java index 0ab99c0de6a..7d7006e708e 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/SystemConfig.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/SystemConfig.java @@ -1119,7 +1119,7 @@ public boolean directUploadEnabled(DvObjectContainer container) { } public String getDataCiteRestApiUrlString() { - // As of 5.0 the 'doi.dataciterestapiurlstring' is the documented jvm option. Prior versions used 'doi.mdcbaseurlstring' or were hardcoded to api.datacite.org, so the defaults are for backward compatibility. 
+ // As of 5.0 the 'doi.dataciterestapiurlstring' is the documented jvm option. Prior versions used 'doi.mdcbaseurlstring' or were hardcoded to api.datacite.org, so the defaults are for backward compatibility. return System.getProperty("doi.dataciterestapiurlstring", System.getProperty("doi.mdcbaseurlstring", "https://api.datacite.org")); } From ad4bb5107fcb14b8c4ebb7f7fd57186511577548 Mon Sep 17 00:00:00 2001 From: Eryk Kulikowski Date: Thu, 17 Nov 2022 11:51:31 +0100 Subject: [PATCH 009/396] renamed jvm option: allow-out-of-band-upload -> upload-out-of-band --- doc/sphinx-guides/source/installation/config.rst | 9 +++++---- .../edu/harvard/iq/dataverse/dataaccess/StorageIO.java | 2 +- .../java/edu/harvard/iq/dataverse/util/FileUtil.java | 2 +- .../java/edu/harvard/iq/dataverse/util/SystemConfig.java | 2 +- 4 files changed, 8 insertions(+), 7 deletions(-) diff --git a/doc/sphinx-guides/source/installation/config.rst b/doc/sphinx-guides/source/installation/config.rst index 4f15ad81190..62cc984bc56 100644 --- a/doc/sphinx-guides/source/installation/config.rst +++ b/doc/sphinx-guides/source/installation/config.rst @@ -249,11 +249,11 @@ A Dataverse installation can alternately store files in a Swift or S3-compatible A Dataverse installation may also be configured to reference some files (e.g. large and/or sensitive data) stored in a web-accessible trusted remote store. -When using integration tools, dataverse installation can be configured to allow out of band upload by setting the ``dataverse.files.\.allow-out-of-band-upload`` JVM option to ``true``. +When using integration tools, dataverse installation can be configured to allow out of band upload by setting the ``dataverse.files.\.upload-out-of-band`` JVM option to ``true``. Files can be then uploaded by an integration tool with ``datasets/{id}/add`` api call, or uploaded directly to the storage and registerd in a dataset afterwards using the ``datasets/{id}/addFiles`` api call. 
-Notice that using S3-storage with ``dataverse.files.\.upload-redirect`` JVM option enabled supersedes the ``allow-out-of-band-upload`` and will enable direct upload even with ``allow-out-of-band-upload`` not set (or set to false). -In other words, ``dataverse.files.\.allow-out-of-band-upload`` option opens the ``datasets/{id}/add`` and ``datasets/{id}/addFiles`` api endpoints without redirecting uploads in the UI. -Enabling the ``upload-redirect`` option allows then direct upload automatically, without the need of enabling the ``allow-out-of-band-upload`` (setting it to ``false`` does not have any effect in that case). +Notice that using S3-storage with ``dataverse.files.\.upload-redirect`` JVM option enabled supersedes the ``upload-out-of-band`` and will enable direct upload even with ``upload-out-of-band`` not set (or set to false). +In other words, ``dataverse.files.\.upload-out-of-band`` option opens the ``datasets/{id}/add`` and ``datasets/{id}/addFiles`` api endpoints without redirecting uploads in the UI. +Enabling the ``upload-redirect`` option allows then direct upload automatically, without the need of enabling the ``upload-out-of-band`` (setting it to ``false`` does not have any effect in that case). The following sections describe how to set up various types of stores and how to configure for multiple stores. @@ -552,6 +552,7 @@ List of S3 Storage Options dataverse.files..bucket-name The bucket name. See above. (none) dataverse.files..download-redirect ``true``/``false`` Enable direct download or proxy through Dataverse. ``false`` dataverse.files..upload-redirect ``true``/``false`` Enable direct upload of files added to a dataset in the S3 store. ``false`` + dataverse.files..upload-out-of-band ``true``/``false`` Enable direct upload of files added to a dataset via API only. 
``false`` dataverse.files..ingestsizelimit Maximum size of directupload files that should be ingested (none) dataverse.files..url-expiration-minutes If direct uploads/downloads: time until links expire. Optional. 60 dataverse.files..min-part-size Multipart direct uploads will occur for files larger than this. Optional. ``1024**3`` diff --git a/src/main/java/edu/harvard/iq/dataverse/dataaccess/StorageIO.java b/src/main/java/edu/harvard/iq/dataverse/dataaccess/StorageIO.java index a2ff546ef0a..85ca97d5f15 100644 --- a/src/main/java/edu/harvard/iq/dataverse/dataaccess/StorageIO.java +++ b/src/main/java/edu/harvard/iq/dataverse/dataaccess/StorageIO.java @@ -606,7 +606,7 @@ public static String getDriverPrefix(String driverId) { public static boolean isDirectUploadEnabled(String driverId) { return Boolean.parseBoolean(System.getProperty("dataverse.files." + driverId + ".upload-redirect")) || - Boolean.parseBoolean(System.getProperty("dataverse.files." + driverId + ".allow-out-of-band-upload")); + Boolean.parseBoolean(System.getProperty("dataverse.files." + driverId + ".upload-out-of-band")); } //Check that storageIdentifier is consistent with store's config diff --git a/src/main/java/edu/harvard/iq/dataverse/util/FileUtil.java b/src/main/java/edu/harvard/iq/dataverse/util/FileUtil.java index db82df72b8a..9b549901d55 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/FileUtil.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/FileUtil.java @@ -1749,7 +1749,7 @@ public static boolean isPackageFile(DataFile dataFile) { public static S3AccessIO getS3AccessForDirectUpload(Dataset dataset) { String driverId = dataset.getEffectiveStorageDriverId(); boolean directEnabled = Boolean.parseBoolean(System.getProperty("dataverse.files." + driverId + ".upload-redirect")) || - Boolean.parseBoolean(System.getProperty("dataverse.files." + driverId + ".allow-out-of-band-upload")); + Boolean.parseBoolean(System.getProperty("dataverse.files." 
+ driverId + ".upload-out-of-band")); //Should only be requested when it is allowed, but we'll log a warning otherwise if(!directEnabled) { logger.warning("Direct upload not supported for files in this dataset: " + dataset.getId()); diff --git a/src/main/java/edu/harvard/iq/dataverse/util/SystemConfig.java b/src/main/java/edu/harvard/iq/dataverse/util/SystemConfig.java index 7d7006e708e..b45ad50ab1d 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/SystemConfig.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/SystemConfig.java @@ -1114,7 +1114,7 @@ public boolean isDatafileValidationOnPublishEnabled() { } public boolean directUploadEnabled(DvObjectContainer container) { - // this method is used in UI only, therfore "dataverse.files." + driverId + ".allow-out-of-band-upload" is not used here + // this method is used in UI only, therfore "dataverse.files." + driverId + ".upload-out-of-band" is not used here return Boolean.parseBoolean(System.getProperty("dataverse.files." + container.getEffectiveStorageDriverId() + ".upload-redirect")); } From 49102ada3380863d115f5167343eb97446b35872 Mon Sep 17 00:00:00 2001 From: Eryk Kulikowski Date: Thu, 17 Nov 2022 12:49:36 +0100 Subject: [PATCH 010/396] linking to api documentation --- doc/sphinx-guides/source/api/native-api.rst | 1 + doc/sphinx-guides/source/installation/config.rst | 5 ++--- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/doc/sphinx-guides/source/api/native-api.rst b/doc/sphinx-guides/source/api/native-api.rst index 6d68d648cb3..0341b6e07d1 100644 --- a/doc/sphinx-guides/source/api/native-api.rst +++ b/doc/sphinx-guides/source/api/native-api.rst @@ -2348,6 +2348,7 @@ The fully expanded example above (without environment variables) looks like this Note: The ``id`` returned in the json response is the id of the file metadata version. +.. 
_add-file-metadata-api: Adding File Metadata ~~~~~~~~~~~~~~~~~~~~ diff --git a/doc/sphinx-guides/source/installation/config.rst b/doc/sphinx-guides/source/installation/config.rst index 62cc984bc56..b074a180c8f 100644 --- a/doc/sphinx-guides/source/installation/config.rst +++ b/doc/sphinx-guides/source/installation/config.rst @@ -250,10 +250,9 @@ A Dataverse installation can alternately store files in a Swift or S3-compatible A Dataverse installation may also be configured to reference some files (e.g. large and/or sensitive data) stored in a web-accessible trusted remote store. When using integration tools, dataverse installation can be configured to allow out of band upload by setting the ``dataverse.files.\.upload-out-of-band`` JVM option to ``true``. -Files can be then uploaded by an integration tool with ``datasets/{id}/add`` api call, or uploaded directly to the storage and registerd in a dataset afterwards using the ``datasets/{id}/addFiles`` api call. +Files can be then uploaded by an integration tool with :ref:`add-file-api` api call, or uploaded directly to the storage and registerd in a dataset afterwards using the :ref:`add-file-metadata-api` api call. Notice that using S3-storage with ``dataverse.files.\.upload-redirect`` JVM option enabled supersedes the ``upload-out-of-band`` and will enable direct upload even with ``upload-out-of-band`` not set (or set to false). -In other words, ``dataverse.files.\.upload-out-of-band`` option opens the ``datasets/{id}/add`` and ``datasets/{id}/addFiles`` api endpoints without redirecting uploads in the UI. -Enabling the ``upload-redirect`` option allows then direct upload automatically, without the need of enabling the ``upload-out-of-band`` (setting it to ``false`` does not have any effect in that case). +In other words, ``dataverse.files.\.upload-out-of-band`` option opens the :ref:`add-file-api` and :ref:`add-file-metadata-api` api endpoints without redirecting uploads in the UI. 
The following sections describe how to set up various types of stores and how to configure for multiple stores. From e9d6df0bb6f23f4f4a8e7fe53213c91596980332 Mon Sep 17 00:00:00 2001 From: Eryk Kulikowski Date: Thu, 17 Nov 2022 14:11:19 +0100 Subject: [PATCH 011/396] some improvements in the documentation --- doc/sphinx-guides/source/api/native-api.rst | 4 ++++ doc/sphinx-guides/source/installation/config.rst | 8 ++++---- 2 files changed, 8 insertions(+), 4 deletions(-) diff --git a/doc/sphinx-guides/source/api/native-api.rst b/doc/sphinx-guides/source/api/native-api.rst index 0341b6e07d1..f075acf40f6 100644 --- a/doc/sphinx-guides/source/api/native-api.rst +++ b/doc/sphinx-guides/source/api/native-api.rst @@ -1481,6 +1481,8 @@ In practice, you only need one the ``dataset_id`` or the ``persistentId``. The e print r.json() print r.status_code +This API call might result in an error: "Dataset store configuration does not allow provided storageIdentifier". One of the possible causes for this error is that out of band upload is not allowed for the storage that the provided identifier refers to. It can be resolved by enabling the ``dataverse.files.\.upload-out-of-band`` JVM option. See :ref:`file-storage`. + .. 
_add-remote-file-api: Add a Remote File to a Dataset @@ -2391,6 +2393,8 @@ The fully expanded example above (without environment variables) looks like this curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X POST https://demo.dataverse.org/api/datasets/:persistentId/addFiles?persistentId=doi:10.5072/FK2/7U7YBV -F jsonData='[{"description":"My description.","directoryLabel":"data/subdir1","categories":["Data"], "restrict":"false", "storageIdentifier":"s3://demo-dataverse-bucket:176e28068b0-1c3f80357c42", "fileName":"file1.txt", "mimeType":"text/plain", "checksum": {"@type": "SHA-1", "@value": "123456"}}, {"description":"My description.","directoryLabel":"data/subdir1","categories":["Data"], "restrict":"false", "storageIdentifier":"s3://demo-dataverse-bucket:176e28068b0-1c3f80357d53", "fileName":"file2.txt", "mimeType":"text/plain", "checksum": {"@type": "SHA-1", "@value": "123789"}}]' +This API call might result in an error: "Dataset store configuration does not allow provided storageIdentifier". One of the possible causes for this error is that out of band upload is not allowed for the storage that the provided identifier refers to. It can be resolved by enabling the ``dataverse.files.\.upload-out-of-band`` JVM option. See :ref:`file-storage`. + Updating File Metadata ~~~~~~~~~~~~~~~~~~~~~~ diff --git a/doc/sphinx-guides/source/installation/config.rst b/doc/sphinx-guides/source/installation/config.rst index b074a180c8f..d3a22453453 100644 --- a/doc/sphinx-guides/source/installation/config.rst +++ b/doc/sphinx-guides/source/installation/config.rst @@ -249,10 +249,10 @@ A Dataverse installation can alternately store files in a Swift or S3-compatible A Dataverse installation may also be configured to reference some files (e.g. large and/or sensitive data) stored in a web-accessible trusted remote store. 
-When using integration tools, dataverse installation can be configured to allow out of band upload by setting the ``dataverse.files.\.upload-out-of-band`` JVM option to ``true``. -Files can be then uploaded by an integration tool with :ref:`add-file-api` api call, or uploaded directly to the storage and registerd in a dataset afterwards using the :ref:`add-file-metadata-api` api call. -Notice that using S3-storage with ``dataverse.files.\.upload-redirect`` JVM option enabled supersedes the ``upload-out-of-band`` and will enable direct upload even with ``upload-out-of-band`` not set (or set to false). -In other words, ``dataverse.files.\.upload-out-of-band`` option opens the :ref:`add-file-api` and :ref:`add-file-metadata-api` api endpoints without redirecting uploads in the UI. +A Dataverse installation can be configured to allow out of band upload by setting the ``dataverse.files.\.upload-out-of-band`` JVM option to ``true``. +This option allows adding files with the :ref:`add-file-api` call. It also allows registering the metadata of a file with the :ref:`add-file-metadata-api` call for a file uploaded directly to the storage. +Notice that using S3-storage with ``dataverse.files.\.upload-redirect`` JVM option enabled supersedes the ``upload-out-of-band`` option and will enable direct upload even with ``upload-out-of-band`` option not set (or set to false). +When neither of the two option is enabled, adding files with API will not be possible and will result with the "Dataset store configuration does not allow provided storageIdentifier" error. The following sections describe how to set up various types of stores and how to configure for multiple stores. 
From dc64aa23c3d4c364f46ad6e695e38ed3311455eb Mon Sep 17 00:00:00 2001 From: Eryk Kulikowski Date: Thu, 17 Nov 2022 17:47:13 +0100 Subject: [PATCH 012/396] documentation improvements by Dieuwertje --- doc/sphinx-guides/source/api/native-api.rst | 4 ++-- doc/sphinx-guides/source/installation/config.rst | 6 +++--- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/doc/sphinx-guides/source/api/native-api.rst b/doc/sphinx-guides/source/api/native-api.rst index f075acf40f6..54e47a29b9d 100644 --- a/doc/sphinx-guides/source/api/native-api.rst +++ b/doc/sphinx-guides/source/api/native-api.rst @@ -1481,7 +1481,7 @@ In practice, you only need one the ``dataset_id`` or the ``persistentId``. The e print r.json() print r.status_code -This API call might result in an error: "Dataset store configuration does not allow provided storageIdentifier". One of the possible causes for this error is that out of band upload is not allowed for the storage that the provided identifier refers to. It can be resolved by enabling the ``dataverse.files.\.upload-out-of-band`` JVM option. See :ref:`file-storage`. +This API call might result in the following error: "Dataset store configuration does not allow provided storageIdentifier". One of the possible causes for this error is that out of band upload is not allowed for the storage that the provided identifier refers to. This error can be resolved by enabling the ``dataverse.files.\.upload-out-of-band`` JVM option. See :ref:`file-storage`. .. 
_add-remote-file-api: @@ -2393,7 +2393,7 @@ The fully expanded example above (without environment variables) looks like this curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X POST https://demo.dataverse.org/api/datasets/:persistentId/addFiles?persistentId=doi:10.5072/FK2/7U7YBV -F jsonData='[{"description":"My description.","directoryLabel":"data/subdir1","categories":["Data"], "restrict":"false", "storageIdentifier":"s3://demo-dataverse-bucket:176e28068b0-1c3f80357c42", "fileName":"file1.txt", "mimeType":"text/plain", "checksum": {"@type": "SHA-1", "@value": "123456"}}, {"description":"My description.","directoryLabel":"data/subdir1","categories":["Data"], "restrict":"false", "storageIdentifier":"s3://demo-dataverse-bucket:176e28068b0-1c3f80357d53", "fileName":"file2.txt", "mimeType":"text/plain", "checksum": {"@type": "SHA-1", "@value": "123789"}}]' -This API call might result in an error: "Dataset store configuration does not allow provided storageIdentifier". One of the possible causes for this error is that out of band upload is not allowed for the storage that the provided identifier refers to. It can be resolved by enabling the ``dataverse.files.\.upload-out-of-band`` JVM option. See :ref:`file-storage`. +This API call might result in the following error: "Dataset store configuration does not allow provided storageIdentifier". One of the possible causes for this error is that out of band upload is not allowed for the storage that the provided identifier refers to. This error can be resolved by enabling the ``dataverse.files.\.upload-out-of-band`` JVM option. See :ref:`file-storage`. 
Updating File Metadata ~~~~~~~~~~~~~~~~~~~~~~ diff --git a/doc/sphinx-guides/source/installation/config.rst b/doc/sphinx-guides/source/installation/config.rst index d3a22453453..4eadcc8ed9d 100644 --- a/doc/sphinx-guides/source/installation/config.rst +++ b/doc/sphinx-guides/source/installation/config.rst @@ -250,9 +250,9 @@ A Dataverse installation can alternately store files in a Swift or S3-compatible A Dataverse installation may also be configured to reference some files (e.g. large and/or sensitive data) stored in a web-accessible trusted remote store. A Dataverse installation can be configured to allow out of band upload by setting the ``dataverse.files.\.upload-out-of-band`` JVM option to ``true``. -This option allows adding files with the :ref:`add-file-api` call. It also allows registering the metadata of a file with the :ref:`add-file-metadata-api` call for a file uploaded directly to the storage. -Notice that using S3-storage with ``dataverse.files.\.upload-redirect`` JVM option enabled supersedes the ``upload-out-of-band`` option and will enable direct upload even with ``upload-out-of-band`` option not set (or set to false). -When neither of the two option is enabled, adding files with API will not be possible and will result with the "Dataset store configuration does not allow provided storageIdentifier" error. +This option allows API users to add files with the :ref:`add-file-api` call. It also allows API users to register the metadata of a file with the :ref:`add-file-metadata-api` call for a file that was uploaded directly to the storage. +Note that if a Dataverse installation uses S3-storage while the ``dataverse.files.\.upload-redirect`` JVM option is enabled, the ``upload-out-of-band`` setting is overruled. This results in direct upload being enabled even with the ``upload-out-of-band`` option not set (or set to false). 
+When the ``upload-out-of-band`` option is not set to ``true`` and it isn't being overruled by the previously mentioned combination, adding files using the API will not be possible and will return the "Dataset store configuration does not allow provided storageIdentifier" error. The following sections describe how to set up various types of stores and how to configure for multiple stores. From 085fb8f44503d69354b5cb8f5793d8144dbde0e1 Mon Sep 17 00:00:00 2001 From: Eryk Kulikowski Date: Mon, 21 Nov 2022 09:53:01 +0100 Subject: [PATCH 013/396] improvements in the documentation --- doc/sphinx-guides/source/installation/config.rst | 3 +++ 1 file changed, 3 insertions(+) diff --git a/doc/sphinx-guides/source/installation/config.rst b/doc/sphinx-guides/source/installation/config.rst index 4eadcc8ed9d..467872bfdd4 100644 --- a/doc/sphinx-guides/source/installation/config.rst +++ b/doc/sphinx-guides/source/installation/config.rst @@ -251,6 +251,9 @@ A Dataverse installation may also be configured to reference some files (e.g. la A Dataverse installation can be configured to allow out of band upload by setting the ``dataverse.files.\.upload-out-of-band`` JVM option to ``true``. This option allows API users to add files with the :ref:`add-file-api` call. It also allows API users to register the metadata of a file with the :ref:`add-file-metadata-api` call for a file that was uploaded directly to the storage. + +The option is useful in cases in which an S3 storage is not used or made public, as required by the ``dataverse.files.\.upload-redirect`` option. An example would be building a tool for synchronizing datasets with files from a third-party repository. In such a case, the tool would upload files directly to the storage, and then use :ref:`add-file-metadata-api` to link them to a dataset. + Note that if a Dataverse installation uses S3-storage while the ``dataverse.files.\.upload-redirect`` JVM option is enabled, the ``upload-out-of-band`` setting is overruled. 
This results in direct upload being enabled even with the ``upload-out-of-band`` option not set (or set to false). When the ``upload-out-of-band`` option is not set to ``true`` and it isn't being overruled by the previously mentioned combination, adding files using the API will not be possible and will return the "Dataset store configuration does not allow provided storageIdentifier" error. From d870e202dccac268cc3f099277559d3e473b7944 Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Mon, 9 Jan 2023 13:18:06 +0100 Subject: [PATCH 014/396] chore(deps): upgrade Nimbus OIDC SDK to latest 10.4 release #9268 --- pom.xml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pom.xml b/pom.xml index 8b6f98c5896..a5d52fd7545 100644 --- a/pom.xml +++ b/pom.xml @@ -381,7 +381,7 @@ com.nimbusds oauth2-oidc-sdk - 9.41.1 + 10.4 From 2ee66618ed77d55878300a7baaa4fa4a94ac7162 Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Mon, 9 Jan 2023 15:52:14 +0100 Subject: [PATCH 015/396] style(oidc): make class fields final in OIDCAuthProvider These values should not be changed once the provider has been initialized. 
--- .../oauth2/oidc/OIDCAuthProvider.java | 26 ++++++++++--------- 1 file changed, 14 insertions(+), 12 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/oidc/OIDCAuthProvider.java b/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/oidc/OIDCAuthProvider.java index a9c44010950..4b6c575cfaf 100644 --- a/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/oidc/OIDCAuthProvider.java +++ b/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/oidc/OIDCAuthProvider.java @@ -54,15 +54,15 @@ public class OIDCAuthProvider extends AbstractOAuth2AuthenticationProvider { protected String title = "Open ID Connect"; protected List scope = Arrays.asList("openid", "email", "profile"); - Issuer issuer; - ClientAuthentication clientAuth; - OIDCProviderMetadata idpMetadata; + final Issuer issuer; + final ClientAuthentication clientAuth; + final OIDCProviderMetadata idpMetadata; public OIDCAuthProvider(String aClientId, String aClientSecret, String issuerEndpointURL) throws AuthorizationSetupException { this.clientSecret = aClientSecret; // nedded for state creation this.clientAuth = new ClientSecretBasic(new ClientID(aClientId), new Secret(aClientSecret)); this.issuer = new Issuer(issuerEndpointURL); - getMetadata(); + this.idpMetadata = getMetadata(); } /** @@ -74,7 +74,9 @@ public OIDCAuthProvider(String aClientId, String aClientSecret, String issuerEnd * @return false */ @Override - public boolean isDisplayIdentifier() { return false; } + public boolean isDisplayIdentifier() { + return false; + } /** * Setup metadata from OIDC provider during creation of the provider representation @@ -82,9 +84,14 @@ public OIDCAuthProvider(String aClientId, String aClientSecret, String issuerEnd * @throws IOException when sth. 
goes wrong with the retrieval * @throws ParseException when the metadata is not parsable */ - void getMetadata() throws AuthorizationSetupException { + OIDCProviderMetadata getMetadata() throws AuthorizationSetupException { try { - this.idpMetadata = getMetadata(this.issuer); + var metadata = getMetadata(this.issuer); + // Assert that the provider supports the code flow + if (metadata.getResponseTypes().stream().noneMatch(ResponseType::impliesCodeFlow)) { + throw new AuthorizationSetupException("OIDC provider at "+this.issuer.getValue()+" does not support code flow, disabling."); + } + return metadata; } catch (IOException ex) { logger.severe("OIDC provider metadata at \"+issuerEndpointURL+\" not retrievable: "+ex.getMessage()); throw new AuthorizationSetupException("OIDC provider metadata at "+this.issuer.getValue()+" not retrievable."); @@ -92,11 +99,6 @@ void getMetadata() throws AuthorizationSetupException { logger.severe("OIDC provider metadata at \"+issuerEndpointURL+\" not parsable: "+ex.getMessage()); throw new AuthorizationSetupException("OIDC provider metadata at "+this.issuer.getValue()+" not parsable."); } - - // Assert that the provider supports the code flow - if (! this.idpMetadata.getResponseTypes().stream().filter(idp -> idp.impliesCodeFlow()).findAny().isPresent()) { - throw new AuthorizationSetupException("OIDC provider at "+this.issuer.getValue()+" does not support code flow, disabling."); - } } /** From 0c7db6614669ecc40e96ffb029be4f21ed04f4db Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Tue, 10 Jan 2023 20:27:55 +0100 Subject: [PATCH 016/396] chore(deps): update Testcontainers to latest version Also updating Postgres Server version in "tc" Maven profile. 
--- modules/dataverse-parent/pom.xml | 2 +- pom.xml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/modules/dataverse-parent/pom.xml b/modules/dataverse-parent/pom.xml index 3911e9d5bbb..e316a5508ce 100644 --- a/modules/dataverse-parent/pom.xml +++ b/modules/dataverse-parent/pom.xml @@ -167,7 +167,7 @@ 5.0.0-RC2 - 1.15.0 + 1.17.6 2.10.1 4.13.1 diff --git a/pom.xml b/pom.xml index a5d52fd7545..56871c7fd56 100644 --- a/pom.xml +++ b/pom.xml @@ -757,7 +757,7 @@ tc true - 9.6 + 13.0 From 5681d24520ac017eb925bc058ecaef877eedd14b Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Tue, 10 Jan 2023 20:28:52 +0100 Subject: [PATCH 017/396] chore(deps): add Keycloak Testcontainer module for tests --- pom.xml | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/pom.xml b/pom.xml index 56871c7fd56..63c362ba904 100644 --- a/pom.xml +++ b/pom.xml @@ -570,6 +570,12 @@ postgresql test + + com.github.dasniko + testcontainers-keycloak + 2.4.0 + test + org.mockito mockito-core From 9f534c4e4a59d7b33b9a0e4a5a876819e9278c47 Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Tue, 10 Jan 2023 20:32:12 +0100 Subject: [PATCH 018/396] feat(tests): extend JvmSetting helper for test class method references Instead of only allowing to supply static String values for a setting, also allow referencing a static method in the test class to retrieve dynamic data. This is inspired by the JUnit5 MethodSource example. 
--- .../iq/dataverse/util/testing/JvmSetting.java | 6 ++++- .../util/testing/JvmSettingExtension.java | 25 ++++++++++++++++++- 2 files changed, 29 insertions(+), 2 deletions(-) diff --git a/src/test/java/edu/harvard/iq/dataverse/util/testing/JvmSetting.java b/src/test/java/edu/harvard/iq/dataverse/util/testing/JvmSetting.java index f54cadaf253..85b10489f15 100644 --- a/src/test/java/edu/harvard/iq/dataverse/util/testing/JvmSetting.java +++ b/src/test/java/edu/harvard/iq/dataverse/util/testing/JvmSetting.java @@ -39,6 +39,8 @@ @ResourceLock(value = Resources.SYSTEM_PROPERTIES, mode = ResourceAccessMode.READ_WRITE) public @interface JvmSetting { + static final String PLACEHOLDER = "NULL"; + /** * The key of the system property to be set. */ @@ -47,10 +49,12 @@ /** * The value of the system property to be set. */ - String value(); + String value() default PLACEHOLDER; String[] varArgs() default {}; + String method() default PLACEHOLDER; + /** * Containing annotation of repeatable {@code @SetSystemProperty}. 
*/ diff --git a/src/test/java/edu/harvard/iq/dataverse/util/testing/JvmSettingExtension.java b/src/test/java/edu/harvard/iq/dataverse/util/testing/JvmSettingExtension.java index 56e87589139..17728e75ffc 100644 --- a/src/test/java/edu/harvard/iq/dataverse/util/testing/JvmSettingExtension.java +++ b/src/test/java/edu/harvard/iq/dataverse/util/testing/JvmSettingExtension.java @@ -5,6 +5,11 @@ import org.junit.jupiter.api.extension.BeforeTestExecutionCallback; import org.junit.jupiter.api.extension.ExtensionContext; +import java.lang.reflect.InvocationTargetException; +import java.lang.reflect.Method; + +import static edu.harvard.iq.dataverse.util.testing.JvmSetting.PLACEHOLDER; + public class JvmSettingExtension implements BeforeTestExecutionCallback, AfterTestExecutionCallback { private ExtensionContext.Store getStore(ExtensionContext context) { @@ -28,7 +33,25 @@ public void beforeTestExecution(ExtensionContext extensionContext) throws Except } // set to new value - System.setProperty(settingName, setting.value()); + if (setting.value().equals(PLACEHOLDER) && setting.method().equals(PLACEHOLDER)) { + throw new IllegalArgumentException("You must either provide a value or a method reference " + + "for key JvmSettings." 
+ setting.key()); + } + + // retrieve value from static test class method if no setting given + if (setting.value().equals(PLACEHOLDER)) { + extensionContext.getTestClass().ifPresent(klass -> { + try { + Method valueMethod = klass.getDeclaredMethod(setting.method()); + valueMethod.setAccessible(true); + System.setProperty(settingName, (String)valueMethod.invoke(null)); + } catch (IllegalAccessException | InvocationTargetException | NoSuchMethodException e) { + throw new RuntimeException(e); + } + }); + } else { + System.setProperty(settingName, setting.value()); + } } }); } From 5cd9f2eb8bd01b88cde28e41c8b27c52656c62b9 Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Tue, 10 Jan 2023 20:40:13 +0100 Subject: [PATCH 019/396] doc(dev): add description for method references in @JvmSetting helper --- doc/sphinx-guides/source/developers/testing.rst | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/doc/sphinx-guides/source/developers/testing.rst b/doc/sphinx-guides/source/developers/testing.rst index 4b3d5fd0a55..2d1948449a9 100755 --- a/doc/sphinx-guides/source/developers/testing.rst +++ b/doc/sphinx-guides/source/developers/testing.rst @@ -89,8 +89,12 @@ For unit tests, the most interesting part is to set a JVM setting just for the c Please use the ``@JvmSetting(key = JvmSettings.XXX, value = "")`` annotation on a test method or a test class to set and clear the property automatically. -To set arbitrary system properties for the current test, a similar extension -``@SystemProperty(key = "", value = "")`` has been added. +Inspired by JUnit's ``@MethodSource`` annotation, you may use ``@JvmSetting(key = JvmSettings.XXX, method = "zzz")`` +to reference a method located in the same test class by name (i. e. ``private static String zzz() {}``) to allow +retrieving dynamic data instead of String constants only. (Note the requirement for a *static* method!) 
+ +To set arbitrary system properties for the current test, a similar extension ``@SystemProperty(key = "", value = "")`` +has been added. (Note: it does not support method references.) Both extensions will ensure the global state of system properties is non-interfering for test executions. Tests using these extensions will be executed in serial. From ebd8eede980fa1b3cce3e2f30538c9a79c180eb2 Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Tue, 10 Jan 2023 20:45:04 +0100 Subject: [PATCH 020/396] feat(settings): add authentication settings for OIDC to JvmSettings #9268 --- .../harvard/iq/dataverse/settings/JvmSettings.java | 11 +++++++++++ 1 file changed, 11 insertions(+) diff --git a/src/main/java/edu/harvard/iq/dataverse/settings/JvmSettings.java b/src/main/java/edu/harvard/iq/dataverse/settings/JvmSettings.java index bc5a73cd958..46b79b06466 100644 --- a/src/main/java/edu/harvard/iq/dataverse/settings/JvmSettings.java +++ b/src/main/java/edu/harvard/iq/dataverse/settings/JvmSettings.java @@ -64,6 +64,17 @@ public enum JvmSettings { SCOPE_API(PREFIX, "api"), API_SIGNING_SECRET(SCOPE_API, "signing-secret"), + // AUTH SETTINGS + SCOPE_AUTH(PREFIX, "auth"), + // AUTH: OIDC SETTINGS + SCOPE_OIDC(SCOPE_AUTH, "oidc"), + OIDC_ENABLED(SCOPE_OIDC, "enabled"), + OIDC_TITLE(SCOPE_OIDC, "title"), + OIDC_SUBTITLE(SCOPE_OIDC, "subtitle"), + OIDC_AUTH_SERVER_URL(SCOPE_OIDC, "auth-server-url"), + OIDC_CLIENT_ID(SCOPE_OIDC, "client-id"), + OIDC_CLIENT_SECRET(SCOPE_OIDC, "client-secret"), + ; private static final String SCOPE_SEPARATOR = "."; From 1bff1be736a1362dd779be66415919961fb44599 Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Tue, 10 Jan 2023 20:48:18 +0100 Subject: [PATCH 021/396] feat(auth): add OIDC provider provisioning via MPCONFIG #9268 Only one provider can be configured via MPCONFIG for now. The provider is configured with an appropriate ID to distinguish it from other providers configured via the API. 
It can be configured in addition to other OIDC providers when desired. --- ...ationProvidersRegistrationServiceBean.java | 10 ++++++++++ .../OIDCAuthenticationProviderFactory.java | 20 +++++++++++++++++++ 2 files changed, 30 insertions(+) diff --git a/src/main/java/edu/harvard/iq/dataverse/authorization/AuthenticationProvidersRegistrationServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/authorization/AuthenticationProvidersRegistrationServiceBean.java index 6289865baf0..79dabe1d390 100644 --- a/src/main/java/edu/harvard/iq/dataverse/authorization/AuthenticationProvidersRegistrationServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/authorization/AuthenticationProvidersRegistrationServiceBean.java @@ -17,6 +17,7 @@ import edu.harvard.iq.dataverse.authorization.providers.oauth2.OAuth2AuthenticationProviderFactory; import edu.harvard.iq.dataverse.authorization.providers.oauth2.oidc.OIDCAuthenticationProviderFactory; import edu.harvard.iq.dataverse.authorization.providers.shib.ShibAuthenticationProviderFactory; +import edu.harvard.iq.dataverse.settings.JvmSettings; import edu.harvard.iq.dataverse.validation.PasswordValidatorServiceBean; import java.util.HashMap; import java.util.Map; @@ -121,6 +122,15 @@ public void startup() { logger.log(Level.SEVERE, "Exception setting up the authentication provider '" + row.getId() + "': " + ex.getMessage(), ex); } }); + + // Add providers registered via MPCONFIG + if (JvmSettings.OIDC_ENABLED.lookupOptional(Boolean.class).orElse(false)) { + try { + registerProvider(OIDCAuthenticationProviderFactory.buildFromSettings()); + } catch (AuthorizationSetupException e) { + logger.log(Level.SEVERE, "Exception setting up an OIDC auth provider via MicroProfile Config", e); + } + } } private void registerProviderFactory(AuthenticationProviderFactory aFactory) diff --git a/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/oidc/OIDCAuthenticationProviderFactory.java 
b/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/oidc/OIDCAuthenticationProviderFactory.java index c6d1a28e19d..f4d631adea3 100644 --- a/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/oidc/OIDCAuthenticationProviderFactory.java +++ b/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/oidc/OIDCAuthenticationProviderFactory.java @@ -5,6 +5,7 @@ import edu.harvard.iq.dataverse.authorization.providers.AuthenticationProviderFactory; import edu.harvard.iq.dataverse.authorization.providers.AuthenticationProviderRow; import edu.harvard.iq.dataverse.authorization.providers.oauth2.OAuth2AuthenticationProviderFactory; +import edu.harvard.iq.dataverse.settings.JvmSettings; import java.util.Map; @@ -44,4 +45,23 @@ public AuthenticationProvider buildProvider( AuthenticationProviderRow aRow ) th return oidc; } + + /** + * Build an OIDC provider from MicroProfile Config provisioned details + * @return The configured auth provider + * @throws AuthorizationSetupException + */ + public static AuthenticationProvider buildFromSettings() throws AuthorizationSetupException { + OIDCAuthProvider oidc = new OIDCAuthProvider( + JvmSettings.OIDC_CLIENT_ID.lookup(), + JvmSettings.OIDC_CLIENT_SECRET.lookup(), + JvmSettings.OIDC_AUTH_SERVER_URL.lookup() + ); + + oidc.setId("oidc-mpconfig"); + oidc.setTitle(JvmSettings.OIDC_TITLE.lookupOptional().orElse("OpenID Connect")); + oidc.setSubTitle(JvmSettings.OIDC_SUBTITLE.lookupOptional().orElse("OpenID Connect")); + + return oidc; + } } From fb11096562269d3704dd74504b6e665a6a6a843e Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Tue, 10 Jan 2023 20:48:48 +0100 Subject: [PATCH 022/396] style(auth): slight reformat of OIDC provider factory #9268 --- .../oauth2/oidc/OIDCAuthenticationProviderFactory.java | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git 
a/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/oidc/OIDCAuthenticationProviderFactory.java b/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/oidc/OIDCAuthenticationProviderFactory.java index f4d631adea3..89cf1cb986d 100644 --- a/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/oidc/OIDCAuthenticationProviderFactory.java +++ b/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/oidc/OIDCAuthenticationProviderFactory.java @@ -38,7 +38,12 @@ public String getInfo() { public AuthenticationProvider buildProvider( AuthenticationProviderRow aRow ) throws AuthorizationSetupException { Map factoryData = OAuth2AuthenticationProviderFactory.parseFactoryData(aRow.getFactoryData()); - OIDCAuthProvider oidc = new OIDCAuthProvider(factoryData.get("clientId"), factoryData.get("clientSecret"), factoryData.get("issuer")); + OIDCAuthProvider oidc = new OIDCAuthProvider( + factoryData.get("clientId"), + factoryData.get("clientSecret"), + factoryData.get("issuer") + ); + oidc.setId(aRow.getId()); oidc.setTitle(aRow.getTitle()); oidc.setSubTitle(aRow.getSubtitle()); From 1fb0f588262a92010c5f0afa52d336a707358a6b Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Tue, 10 Jan 2023 20:51:24 +0100 Subject: [PATCH 023/396] test(auth): add integration test for OIDC provisioning via MPCONFIG #9268 Using Testcontainers to start a Keycloak instance with our default development realm, the provider is created using MPCONFIG settings. 
--- .../OIDCAuthenticationProviderFactoryIT.java | 37 +++++++++++++++++++ 1 file changed, 37 insertions(+) create mode 100644 src/test/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/oidc/OIDCAuthenticationProviderFactoryIT.java diff --git a/src/test/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/oidc/OIDCAuthenticationProviderFactoryIT.java b/src/test/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/oidc/OIDCAuthenticationProviderFactoryIT.java new file mode 100644 index 00000000000..53cfcca2742 --- /dev/null +++ b/src/test/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/oidc/OIDCAuthenticationProviderFactoryIT.java @@ -0,0 +1,37 @@ +package edu.harvard.iq.dataverse.authorization.providers.oauth2.oidc; + +import dasniko.testcontainers.keycloak.KeycloakContainer; +import edu.harvard.iq.dataverse.settings.JvmSettings; +import edu.harvard.iq.dataverse.util.testing.JvmSetting; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; +import org.testcontainers.junit.jupiter.Container; +import org.testcontainers.junit.jupiter.Testcontainers; + +import static org.junit.jupiter.api.Assertions.assertTrue; + +@Tag("testcontainers") +@Testcontainers +class OIDCAuthenticationProviderFactoryIT { + + static final String clientId = "oidc-client"; + static final String clientSecret = "ss6gE8mODCDfqesQaSG3gwUwZqZt547E"; + static final String realm = "oidc-realm"; + + @Container + static KeycloakContainer keycloakContainer = new KeycloakContainer().withRealmImportFile("keycloak/oidc-realm.json"); + + // simple method to retrieve the issuer URL, referenced to by @JvmSetting annotations + private static String getAuthUrl() { + return keycloakContainer.getAuthServerUrl() + "realms/" + realm; + } + + @Test + @JvmSetting(key = JvmSettings.OIDC_CLIENT_ID, value = clientId) + @JvmSetting(key = JvmSettings.OIDC_CLIENT_SECRET, value = clientSecret) + @JvmSetting(key = JvmSettings.OIDC_AUTH_SERVER_URL, method = "getAuthUrl") + 
void testCreateProvider() throws Exception { + OIDCAuthProvider oidcAuthProvider = (OIDCAuthProvider) OIDCAuthenticationProviderFactory.buildFromSettings(); + assertTrue(oidcAuthProvider.getMetadata().getTokenEndpointURI().toString().startsWith(keycloakContainer.getAuthServerUrl())); + } +} \ No newline at end of file From e31dba3da3dc267e963c537da4d0076ed11eee44 Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Tue, 10 Jan 2023 20:54:39 +0100 Subject: [PATCH 024/396] build(auth): make resources in /conf avail to tests #9268 To use data in /conf for tests, adding the folder in Maven to copy them to the test classpath as resources helps to use them in tests very easily. All dirs under /conf will be copied to the /target/test-classes directory recursively. This also works when running tests in IDEs like IntelliJ. --- pom.xml | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/pom.xml b/pom.xml index 63c362ba904..a26071d253b 100644 --- a/pom.xml +++ b/pom.xml @@ -596,6 +596,11 @@ + + + ${project.basedir}/conf + + - + src/main/java From 4d7df9c4abfdebe4b8d19382fd836ff9827f5053 Mon Sep 17 00:00:00 2001 From: Leonid Andreev Date: Thu, 23 Feb 2023 11:13:17 -0500 Subject: [PATCH 027/396] (draft/work in progress) framework for a new file creation command and storage quota enforcement #9361. 
--- .../iq/dataverse/DataFileServiceBean.java | 57 ++ .../iq/dataverse/DatasetServiceBean.java | 2 +- .../iq/dataverse/EditDatafilesPage.java | 30 +- .../impl/CreateNewDataFilesCommand.java | 555 ++++++++++++++++++ .../settings/SettingsServiceBean.java | 10 +- .../harvard/iq/dataverse/util/FileUtil.java | 36 +- .../iq/dataverse/util/SystemConfig.java | 16 +- .../util/bagit/data/FileUtilWrapper.java | 7 +- .../FileExceedsStorageQuotaException.java | 22 + src/main/java/propertyFiles/Bundle.properties | 1 + src/main/webapp/editFilesFragment.xhtml | 1 + 11 files changed, 718 insertions(+), 19 deletions(-) create mode 100644 src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateNewDataFilesCommand.java create mode 100644 src/main/java/edu/harvard/iq/dataverse/util/file/FileExceedsStorageQuotaException.java diff --git a/src/main/java/edu/harvard/iq/dataverse/DataFileServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DataFileServiceBean.java index 7da06f36be4..328f2aa59c0 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DataFileServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/DataFileServiceBean.java @@ -67,6 +67,8 @@ public class DataFileServiceBean implements java.io.Serializable { @EJB EmbargoServiceBean embargoService; + @EJB SystemConfig systemConfig; + @PersistenceContext(unitName = "VDCNet-ejbPU") private EntityManager em; @@ -140,6 +142,36 @@ public class DataFileServiceBean implements java.io.Serializable { */ public static final String MIME_TYPE_PACKAGE_FILE = "application/vnd.dataverse.file-package"; + public class UserStorageQuota { + private Long totalAllocatedInBytes = 0L; + private Long totalUsageInBytes = 0L; + + public UserStorageQuota(Long allocated, Long used) { + this.totalAllocatedInBytes = allocated; + this.totalUsageInBytes = used; + } + + public Long getTotalAllocatedInBytes() { + return totalAllocatedInBytes; + } + + public void setTotalAllocatedInBytes(Long totalAllocatedInBytes) { + this.totalAllocatedInBytes = 
totalAllocatedInBytes; + } + + public Long getTotalUsageInBytes() { + return totalUsageInBytes; + } + + public void setTotalUsageInBytes(Long totalUsageInBytes) { + this.totalUsageInBytes = totalUsageInBytes; + } + + public Long getRemainingQuotaInBytes() { + return totalAllocatedInBytes - totalUsageInBytes; + } + } + public DataFile find(Object pk) { return em.find(DataFile.class, pk); } @@ -1657,4 +1689,29 @@ public Embargo findEmbargo(Long id) { DataFile d = find(id); return d.getEmbargo(); } + + public Long getStorageUsageByCreator(AuthenticatedUser user) { + Query query = em.createQuery("SELECT SUM(o.filesize) FROM DataFile o WHERE o.creator.id=:creatorId"); + + try { + Long totalSize = (Long)query.setParameter("creatorId", user.getId()).getSingleResult(); + logger.info("total size for user: "+totalSize); + return totalSize == null ? 0L : totalSize; + } catch (NoResultException nre) { // ? + logger.info("NoResultException, returning 0L"); + return 0L; + } + } + + public UserStorageQuota getUserStorageQuota(AuthenticatedUser user, Dataset dataset) { + // this is for testing only - one pre-set, installation-wide quota limit + // for everybody: + Long totalAllocated = systemConfig.getTestStorageQuotaLimit(); + // again, this is for testing only - we are only counting the total size + // of all the files created by this user; it will likely be a much more + // complex calculation in real life applications: + Long totalUsed = getStorageUsageByCreator(user); + + return new UserStorageQuota(totalAllocated, totalUsed); + } } diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DatasetServiceBean.java index 91ec050fe5c..4e522bbd441 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetServiceBean.java @@ -1166,5 +1166,5 @@ public void deleteHarvestedDataset(Dataset dataset, DataverseRequest request, Lo hdLogger.warning("Failed to 
destroy the dataset"); } } - + } diff --git a/src/main/java/edu/harvard/iq/dataverse/EditDatafilesPage.java b/src/main/java/edu/harvard/iq/dataverse/EditDatafilesPage.java index 1c033b37872..74c4e782d56 100644 --- a/src/main/java/edu/harvard/iq/dataverse/EditDatafilesPage.java +++ b/src/main/java/edu/harvard/iq/dataverse/EditDatafilesPage.java @@ -28,6 +28,7 @@ import edu.harvard.iq.dataverse.engine.command.impl.RequestRsyncScriptCommand; import edu.harvard.iq.dataverse.engine.command.impl.UpdateDatasetThumbnailCommand; import edu.harvard.iq.dataverse.engine.command.impl.UpdateDatasetVersionCommand; +import edu.harvard.iq.dataverse.engine.command.impl.CreateNewDataFilesCommand; import edu.harvard.iq.dataverse.ingest.IngestRequest; import edu.harvard.iq.dataverse.ingest.IngestServiceBean; import edu.harvard.iq.dataverse.ingest.IngestUtil; @@ -187,7 +188,13 @@ public enum Referrer { // Used to store results of permissions checks private final Map datasetPermissionMap = new HashMap<>(); // { Permission human_name : Boolean } + // Size limit of an individual file: (set for the storage volume used) private Long maxFileUploadSizeInBytes = null; + // Total amount of data that the user should be allowed to upload. + // Will be calculated in real time based on various level quotas - + // for this user and/or this collection/dataset, etc. We should + // assume that it may change during the user session. + private Long maxTotalUploadSizeInBytes = null; private Long maxIngestSizeInBytes = null; // CSV: 4.8 MB, DTA: 976.6 KB, XLSX: 5.7 MB, etc. 
private String humanPerFormatTabularLimits = null; @@ -336,6 +343,14 @@ public Long getMaxFileUploadSizeInBytes() { public String getHumanMaxFileUploadSizeInBytes() { return FileSizeChecker.bytesToHumanReadable(this.maxFileUploadSizeInBytes); } + + public Long getMaxTotalUploadSizeInBytes() { + return maxTotalUploadSizeInBytes; + } + + public String getHumanMaxTotalUploadSizeInBytes() { + return FileSizeChecker.bytesToHumanReadable(maxTotalUploadSizeInBytes); + } public boolean isUnlimitedUploadFileSize() { @@ -563,7 +578,6 @@ public String init() { this.maxIngestSizeInBytes = systemConfig.getTabularIngestSizeLimit(); this.humanPerFormatTabularLimits = populateHumanPerFormatTabularLimits(); this.multipleUploadFilesLimit = systemConfig.getMultipleUploadFilesLimit(); - this.maxFileUploadSizeInBytes = systemConfig.getMaxFileUploadSizeForStore(dataset.getEffectiveStorageDriverId()); hasValidTermsOfAccess = isHasValidTermsOfAccess(); if (!hasValidTermsOfAccess) { @@ -2024,7 +2038,13 @@ public void handleFileUpload(FileUploadEvent event) throws IOException { // Note: A single uploaded file may produce multiple datafiles - // for example, multiple files can be extracted from an uncompressed // zip file. 
- CreateDataFileResult createDataFilesResult = FileUtil.createDataFiles(workingVersion, uFile.getInputStream(), uFile.getFileName(), uFile.getContentType(), null, null, systemConfig); + ///CreateDataFileResult createDataFilesResult = FileUtil.createDataFiles(workingVersion, uFile.getInputStream(), uFile.getFileName(), uFile.getContentType(), null, null, systemConfig); + + Command cmd; + cmd = new CreateNewDataFilesCommand(dvRequestService.getDataverseRequest(), workingVersion, uFile.getInputStream(), uFile.getFileName(), uFile.getContentType(), null, null); + CreateDataFileResult createDataFilesResult = commandEngine.submit(cmd); + + dFileList = createDataFilesResult.getDataFiles(); String createDataFilesError = editDataFilesPageHelper.getHtmlErrorMessage(createDataFilesResult); if(createDataFilesError != null) { @@ -2033,8 +2053,14 @@ public void handleFileUpload(FileUploadEvent event) throws IOException { } } catch (IOException ioex) { + // shouldn't we try and communicate to the user what happened? logger.warning("Failed to process and/or save the file " + uFile.getFileName() + "; " + ioex.getMessage()); return; + } catch (CommandException cex) { + // shouldn't we try and communicate to the user what happened? 
+ errorMessages.add(cex.getMessage()); + uploadComponentId = event.getComponent().getClientId(); + return; } /*catch (FileExceedsMaxSizeException ex) { logger.warning("Failed to process and/or save the file " + uFile.getFileName() + "; " + ex.getMessage()); diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateNewDataFilesCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateNewDataFilesCommand.java new file mode 100644 index 00000000000..9f281f9446d --- /dev/null +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateNewDataFilesCommand.java @@ -0,0 +1,555 @@ +package edu.harvard.iq.dataverse.engine.command.impl; + +import edu.harvard.iq.dataverse.DataFile; +import edu.harvard.iq.dataverse.DatasetVersion; +import edu.harvard.iq.dataverse.authorization.Permission; +import edu.harvard.iq.dataverse.datasetutility.FileExceedsMaxSizeException; +import edu.harvard.iq.dataverse.datasetutility.FileSizeChecker; +import static edu.harvard.iq.dataverse.datasetutility.FileSizeChecker.bytesToHumanReadable; +import edu.harvard.iq.dataverse.engine.command.AbstractCommand; +import edu.harvard.iq.dataverse.engine.command.CommandContext; +import edu.harvard.iq.dataverse.engine.command.DataverseRequest; +import edu.harvard.iq.dataverse.engine.command.RequiredPermissions; +import edu.harvard.iq.dataverse.engine.command.exception.CommandException; +import edu.harvard.iq.dataverse.engine.command.exception.CommandExecutionException; +import edu.harvard.iq.dataverse.ingest.IngestServiceShapefileHelper; +import edu.harvard.iq.dataverse.DataFileServiceBean.UserStorageQuota; +import edu.harvard.iq.dataverse.util.file.FileExceedsStorageQuotaException; +import edu.harvard.iq.dataverse.util.BundleUtil; +import edu.harvard.iq.dataverse.util.FileUtil; +import static edu.harvard.iq.dataverse.util.FileUtil.MIME_TYPE_UNDETERMINED_DEFAULT; +import static edu.harvard.iq.dataverse.util.FileUtil.createIngestFailureReport; +import static 
edu.harvard.iq.dataverse.util.FileUtil.determineFileType; +import static edu.harvard.iq.dataverse.util.FileUtil.determineFileTypeByNameAndExtension; +import static edu.harvard.iq.dataverse.util.FileUtil.getFilesTempDirectory; +import static edu.harvard.iq.dataverse.util.FileUtil.saveInputStreamInTempFile; +import static edu.harvard.iq.dataverse.util.FileUtil.useRecognizedType; +import edu.harvard.iq.dataverse.util.ShapefileHandler; +import edu.harvard.iq.dataverse.util.StringUtil; +import edu.harvard.iq.dataverse.util.file.BagItFileHandler; +import edu.harvard.iq.dataverse.util.file.BagItFileHandlerFactory; +import edu.harvard.iq.dataverse.util.file.CreateDataFileResult; +import java.io.File; +import java.io.FileInputStream; +import java.io.IOException; +import java.io.InputStream; +import java.nio.charset.Charset; +import java.nio.file.Files; +import java.nio.file.Path; +import java.nio.file.Paths; +import java.nio.file.StandardCopyOption; +import java.text.MessageFormat; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; +import java.util.Optional; +import java.util.logging.Logger; +import java.util.zip.GZIPInputStream; +import java.util.zip.ZipEntry; +import java.util.zip.ZipInputStream; +import javax.enterprise.inject.spi.CDI; +import org.apache.commons.io.FileUtils; +import org.apache.commons.lang3.StringUtils; + +/** + * + * @author landreev + */ +@RequiredPermissions( Permission.EditDataset ) +public class CreateNewDataFilesCommand extends AbstractCommand { + private static final Logger logger = Logger.getLogger(CreateNewDataFilesCommand.class.getCanonicalName()); + + private final DatasetVersion version; + private final InputStream inputStream; + private final String fileName; + private final String suppliedContentType; + private final String newStorageIdentifier; + private final String newCheckSum; + private DataFile.ChecksumType newCheckSumType; + + public CreateNewDataFilesCommand(DataverseRequest aRequest, DatasetVersion 
version, InputStream inputStream, String fileName, String suppliedContentType, String newStorageIdentifier, String newCheckSum) { + this(aRequest, version, inputStream, fileName, suppliedContentType, newStorageIdentifier, newCheckSum, null); + } + + public CreateNewDataFilesCommand(DataverseRequest aRequest, DatasetVersion version, InputStream inputStream, String fileName, String suppliedContentType, String newStorageIdentifier, String newCheckSum, DataFile.ChecksumType newCheckSumType) { + super(aRequest, version.getDataset()); + + this.version = version; + this.inputStream = inputStream; + this.fileName = fileName; + this.suppliedContentType = suppliedContentType; + this.newStorageIdentifier = newStorageIdentifier; + this.newCheckSum = newCheckSum; + this.newCheckSumType = newCheckSumType; + } + + @Override + public CreateDataFileResult execute(CommandContext ctxt) throws CommandException { + List datafiles = new ArrayList<>(); + + //When there is no checksum/checksumtype being sent (normal upload, needs to be calculated), set the type to the current default + if(newCheckSumType == null) { + newCheckSumType = ctxt.systemConfig().getFileFixityChecksumAlgorithm(); + } + + String warningMessage = null; + + // save the file, in the temporary location for now: + Path tempFile = null; + + Long fileSizeLimit = ctxt.systemConfig().getMaxFileUploadSizeForStore(version.getDataset().getEffectiveStorageDriverId()); + Long storageQuotaLimit = null; + + if (ctxt.systemConfig().isStorageQuotasEnforced()) { + //storageQuotaLimit = ctxt.files().getClass()...; + UserStorageQuota quota = ctxt.files().getUserStorageQuota(super.getRequest().getAuthenticatedUser(), this.version.getDataset()); + if (quota != null) { + storageQuotaLimit = quota.getRemainingQuotaInBytes(); + } + } + String finalType = null; + + if (newStorageIdentifier == null) { + if (getFilesTempDirectory() != null) { + try { + tempFile = Files.createTempFile(Paths.get(getFilesTempDirectory()), "tmp", "upload"); + // 
"temporary" location is the key here; this is why we are not using + // the DataStore framework for this - the assumption is that + // temp files will always be stored on the local filesystem. + // -- L.A. Jul. 2014 + logger.fine("Will attempt to save the file as: " + tempFile.toString()); + Files.copy(inputStream, tempFile, StandardCopyOption.REPLACE_EXISTING); + } catch (IOException ioex) { + throw new CommandExecutionException("Failed to save the upload as a temp file (temp disk space?)", ioex, this); + } + + // A file size check, before we do anything else: + // (note that "no size limit set" = "unlimited") + // (also note, that if this is a zip file, we'll be checking + // the size limit for each of the individual unpacked files) + Long fileSize = tempFile.toFile().length(); + if (fileSizeLimit != null && fileSize > fileSizeLimit) { + try { + tempFile.toFile().delete(); + } catch (Exception ex) { + // ignore - but log a warning + logger.warning("Could not remove temp file " + tempFile.getFileName()); + } + throw new CommandExecutionException(MessageFormat.format(BundleUtil.getStringFromBundle("file.addreplace.error.file_exceeds_limit"), bytesToHumanReadable(fileSize), bytesToHumanReadable(fileSizeLimit)), this); + } + + } else { + throw new CommandExecutionException("Temp directory is not configured.", this); + } + + logger.fine("mime type supplied: " + suppliedContentType); + + // Let's try our own utilities (Jhove, etc.) to determine the file type + // of the uploaded file. (We may already have a mime type supplied for this + // file - maybe the type that the browser recognized on upload; or, if + // it's a harvest, maybe the remote server has already given us the type + // for this file... with our own type utility we may or may not do better + // than the type supplied: + // -- L.A. 
+ String recognizedType = null; + + try { + recognizedType = determineFileType(tempFile.toFile(), fileName); + logger.fine("File utility recognized the file as " + recognizedType); + if (recognizedType != null && !recognizedType.equals("")) { + if (useRecognizedType(suppliedContentType, recognizedType)) { + finalType = recognizedType; + } + } + + } catch (Exception ex) { + logger.warning("Failed to run the file utility mime type check on file " + fileName); + } + + if (finalType == null) { + finalType = (suppliedContentType == null || suppliedContentType.equals("")) + ? MIME_TYPE_UNDETERMINED_DEFAULT + : suppliedContentType; + } + + // A few special cases: + // if this is a gzipped FITS file, we'll uncompress it, and ingest it as + // a regular FITS file: + if (finalType.equals("application/fits-gzipped")) { + + InputStream uncompressedIn = null; + String finalFileName = fileName; + // if the file name had the ".gz" extension, remove it, + // since we are going to uncompress it: + if (fileName != null && fileName.matches(".*\\.gz$")) { + finalFileName = fileName.replaceAll("\\.gz$", ""); + } + + DataFile datafile = null; + try { + uncompressedIn = new GZIPInputStream(new FileInputStream(tempFile.toFile())); + File unZippedTempFile = saveInputStreamInTempFile(uncompressedIn, fileSizeLimit, storageQuotaLimit); + datafile = FileUtil.createSingleDataFile(version, unZippedTempFile, finalFileName, MIME_TYPE_UNDETERMINED_DEFAULT, ctxt.systemConfig().getFileFixityChecksumAlgorithm()); + } catch (IOException | FileExceedsMaxSizeException | FileExceedsStorageQuotaException ioex) { + // it looks like we simply skip the file silently, if its uncompressed size + // exceeds the limit. we should probably report this in detail instead. 
+ datafile = null; + } finally { + if (uncompressedIn != null) { + try { + uncompressedIn.close(); + } catch (IOException e) { + } + } + } + + // If we were able to produce an uncompressed file, we'll use it + // to create and return a final DataFile; if not, we're not going + // to do anything - and then a new DataFile will be created further + // down, from the original, uncompressed file. + if (datafile != null) { + // remove the compressed temp file: + try { + tempFile.toFile().delete(); + } catch (SecurityException ex) { + // (this is very non-fatal) + logger.warning("Failed to delete temporary file " + tempFile.toString()); + } + + datafiles.add(datafile); + return CreateDataFileResult.success(fileName, finalType, datafiles); + } + + // If it's a ZIP file, we are going to unpack it and create multiple + // DataFile objects from its contents: + } else if (finalType.equals("application/zip")) { + + ZipInputStream unZippedIn = null; + ZipEntry zipEntry = null; + + int fileNumberLimit = ctxt.systemConfig().getZipUploadFilesLimit(); + + try { + Charset charset = null; + /* + TODO: (?) + We may want to investigate somehow letting the user specify + the charset for the filenames in the zip file... + - otherwise, ZipInputStream bails out if it encounters a file + name that's not valid in the current charset (i.e., UTF-8, in + our case). It would be a bit trickier than what we're doing for + SPSS tabular ingests - with the lang. encoding pulldown menu - + because this encoding needs to be specified *before* we upload and + attempt to unzip the file. + -- L.A. 
4.0 beta12 + logger.info("default charset is "+Charset.defaultCharset().name()); + if (Charset.isSupported("US-ASCII")) { + logger.info("charset US-ASCII is supported."); + charset = Charset.forName("US-ASCII"); + if (charset != null) { + logger.info("was able to obtain charset for US-ASCII"); + } + + } + */ + + if (charset != null) { + unZippedIn = new ZipInputStream(new FileInputStream(tempFile.toFile()), charset); + } else { + unZippedIn = new ZipInputStream(new FileInputStream(tempFile.toFile())); + } + + Long storageQuotaLimitForUnzippedFiles = storageQuotaLimit; + while (true) { + try { + zipEntry = unZippedIn.getNextEntry(); + } catch (IllegalArgumentException iaex) { + // Note: + // ZipInputStream documentation doesn't even mention that + // getNextEntry() throws an IllegalArgumentException! + // but that's what happens if the file name of the next + // entry is not valid in the current CharSet. + // -- L.A. + warningMessage = "Failed to unpack Zip file. (Unknown Character Set used in a file name?) 
Saving the file as is."; + logger.warning(warningMessage); + throw new IOException(); + } + + if (zipEntry == null) { + break; + } + // Note that some zip entries may be directories - we + // simply skip them: + + if (!zipEntry.isDirectory()) { + if (datafiles.size() > fileNumberLimit) { + logger.warning("Zip upload - too many files."); + warningMessage = "The number of files in the zip archive is over the limit (" + fileNumberLimit + + "); please upload a zip archive with fewer files, if you want them to be ingested " + + "as individual DataFiles."; + throw new IOException(); + } + + String fileEntryName = zipEntry.getName(); + logger.fine("ZipEntry, file: " + fileEntryName); + + if (fileEntryName != null && !fileEntryName.equals("")) { + + String shortName = fileEntryName.replaceFirst("^.*[\\/]", ""); + + // Check if it's a "fake" file - a zip archive entry + // created for a MacOS X filesystem element: (these + // start with "._") + if (!shortName.startsWith("._") && !shortName.startsWith(".DS_Store") && !"".equals(shortName)) { + // OK, this seems like an OK file entry - we'll try + // to read it and create a DataFile with it: + + File unZippedTempFile = saveInputStreamInTempFile(unZippedIn, fileSizeLimit, storageQuotaLimitForUnzippedFiles); + DataFile datafile = FileUtil.createSingleDataFile(version, + unZippedTempFile, + null, + shortName, + MIME_TYPE_UNDETERMINED_DEFAULT, + ctxt.systemConfig().getFileFixityChecksumAlgorithm(), null, false); + + storageQuotaLimitForUnzippedFiles = storageQuotaLimitForUnzippedFiles - datafile.getFilesize(); + + if (!fileEntryName.equals(shortName)) { + // If the filename looks like a hierarchical folder name (i.e., contains slashes and backslashes), + // we'll extract the directory name; then subject it to some "aggressive sanitizing" - strip all + // the leading, trailing and duplicate slashes; then replace all the characters that + // don't pass our validation rules. 
+ String directoryName = fileEntryName.replaceFirst("[\\\\/][\\\\/]*[^\\\\/]*$", ""); + directoryName = StringUtil.sanitizeFileDirectory(directoryName, true); + // if (!"".equals(directoryName)) { + if (!StringUtil.isEmpty(directoryName)) { + logger.fine("setting the directory label to " + directoryName); + datafile.getFileMetadata().setDirectoryLabel(directoryName); + } + } + + if (datafile != null) { + // We have created this datafile with the mime type "unknown"; + // Now that we have it saved in a temporary location, + // let's try and determine its real type: + + String tempFileName = getFilesTempDirectory() + "/" + datafile.getStorageIdentifier(); + + try { + recognizedType = determineFileType(new File(tempFileName), shortName); + logger.fine("File utility recognized unzipped file as " + recognizedType); + if (recognizedType != null && !recognizedType.equals("")) { + datafile.setContentType(recognizedType); + } + } catch (Exception ex) { + logger.warning("Failed to run the file utility mime type check on file " + fileName); + } + + datafiles.add(datafile); + } + } + } + } + unZippedIn.closeEntry(); + + } + + } catch (IOException ioex) { + // just clear the datafiles list and let + // ingest default to creating a single DataFile out + // of the unzipped file. + logger.warning("Unzipping failed; rolling back to saving the file as is."); + if (warningMessage == null) { + warningMessage = BundleUtil.getStringFromBundle("file.addreplace.warning.unzip.failed"); + } + + datafiles.clear(); + } catch (FileExceedsMaxSizeException femsx) { + logger.warning("One of the unzipped files exceeds the size limit; resorting to saving the file as is. 
" + femsx.getMessage()); + warningMessage = BundleUtil.getStringFromBundle("file.addreplace.warning.unzip.failed.size", Arrays.asList(FileSizeChecker.bytesToHumanReadable(fileSizeLimit))); + datafiles.clear(); + } catch (FileExceedsStorageQuotaException fesqx) { + logger.warning("One of the unzipped files exceeds the storage quota limit; resorting to saving the file as is. " + fesqx.getMessage()); + warningMessage = BundleUtil.getStringFromBundle("file.addreplace.warning.unzip.failed.quota", Arrays.asList(FileSizeChecker.bytesToHumanReadable(storageQuotaLimit))); + datafiles.clear(); + } finally { + if (unZippedIn != null) { + try { + unZippedIn.close(); + } catch (Exception zEx) { + } + } + } + if (datafiles.size() > 0) { + // remove the uploaded zip file: + try { + Files.delete(tempFile); + } catch (IOException ioex) { + // do nothing - it's just a temp file. + logger.warning("Could not remove temp file " + tempFile.getFileName().toString()); + } + // and return: + return CreateDataFileResult.success(fileName, finalType, datafiles); + } + + } else if (finalType.equalsIgnoreCase(ShapefileHandler.SHAPEFILE_FILE_TYPE)) { + // Shape files may have to be split into multiple files, + // one zip archive per each complete set of shape files: + + // File rezipFolder = new File(this.getFilesTempDirectory()); + File rezipFolder = FileUtil.getShapefileUnzipTempDirectory(); + + IngestServiceShapefileHelper shpIngestHelper; + shpIngestHelper = new IngestServiceShapefileHelper(tempFile.toFile(), rezipFolder); + + boolean didProcessWork = shpIngestHelper.processFile(); + if (!(didProcessWork)) { + logger.severe("Processing of zipped shapefile failed."); + return CreateDataFileResult.error(fileName, finalType); + } + + try { + Long storageQuotaLimitForRezippedFiles = storageQuotaLimit; + + for (File finalFile : shpIngestHelper.getFinalRezippedFiles()) { + FileInputStream finalFileInputStream = new FileInputStream(finalFile); + finalType = FileUtil.determineContentType(finalFile); 
+ if (finalType == null) { + logger.warning("Content type is null; but should default to 'MIME_TYPE_UNDETERMINED_DEFAULT'"); + continue; + } + + File unZippedShapeTempFile = saveInputStreamInTempFile(finalFileInputStream, fileSizeLimit, storageQuotaLimitForRezippedFiles); + DataFile new_datafile = FileUtil.createSingleDataFile(version, unZippedShapeTempFile, finalFile.getName(), finalType, ctxt.systemConfig().getFileFixityChecksumAlgorithm()); + + String directoryName = null; + String absolutePathName = finalFile.getParent(); + if (absolutePathName != null) { + if (absolutePathName.length() > rezipFolder.toString().length()) { + // This file lives in a subfolder - we want to + // preserve it in the FileMetadata: + directoryName = absolutePathName.substring(rezipFolder.toString().length() + 1); + + if (!StringUtil.isEmpty(directoryName)) { + new_datafile.getFileMetadata().setDirectoryLabel(directoryName); + } + } + } + if (new_datafile != null) { + datafiles.add(new_datafile); + // todo: can this new_datafile be null? + storageQuotaLimitForRezippedFiles = storageQuotaLimitForRezippedFiles - new_datafile.getFilesize(); + } else { + logger.severe("Could not add part of rezipped shapefile. new_datafile was null: " + finalFile.getName()); + } + try { + finalFileInputStream.close(); + } catch (IOException ioex) { + // this one can be ignored + } + + } + } catch (FileExceedsMaxSizeException | FileExceedsStorageQuotaException femsx) { + logger.severe("One of the unzipped shape files exceeded the size limit, or the storage quota; giving up. " + femsx.getMessage()); + datafiles.clear(); + // (or should we throw an exception, instead of skipping it quietly? + } catch (IOException ioex) { + throw new CommandExecutionException("Failed to process one of the components of the unpacked shape file", ioex, this); + // todo? - maybe try to provide a more detailed explanation, of which repackaged component, etc.? 
+ } + + // Delete the temp directory used for unzipping + // The try-catch is due to error encountered in using NFS for stocking file, + // cf. https://github.com/IQSS/dataverse/issues/5909 + try { + FileUtils.deleteDirectory(rezipFolder); + } catch (IOException ioex) { + // do nothing - it's a temp folder. + logger.warning("Could not remove temp folder, error message : " + ioex.getMessage()); + } + + if (datafiles.size() > 0) { + // remove the uploaded zip file: + try { + Files.delete(tempFile); + } catch (IOException ioex) { + // ignore - it's just a temp file - but let's log a warning + logger.warning("Could not remove temp file " + tempFile.getFileName().toString()); + } catch (SecurityException se) { + // same + logger.warning("Unable to delete: " + tempFile.toString() + "due to Security Exception: " + + se.getMessage()); + } + return CreateDataFileResult.success(fileName, finalType, datafiles); + } else { + logger.severe("No files added from directory of rezipped shapefiles"); + } + return CreateDataFileResult.error(fileName, finalType); + + } else if (finalType.equalsIgnoreCase(BagItFileHandler.FILE_TYPE)) { + + try { + Optional bagItFileHandler = CDI.current().select(BagItFileHandlerFactory.class).get().getBagItFileHandler(); + if (bagItFileHandler.isPresent()) { + CreateDataFileResult result = bagItFileHandler.get().handleBagItPackage(ctxt.systemConfig(), version, fileName, tempFile.toFile()); + return result; + } + } catch (IOException ioex) { + throw new CommandExecutionException("Failed to process uploaded BagIt file", ioex, this); + } + } + } else { + // Default to suppliedContentType if set or the overall undetermined default if a contenttype isn't supplied + finalType = StringUtils.isBlank(suppliedContentType) ? 
FileUtil.MIME_TYPE_UNDETERMINED_DEFAULT : suppliedContentType; + String type = determineFileTypeByNameAndExtension(fileName); + if (!StringUtils.isBlank(type)) { + //Use rules for deciding when to trust browser supplied type + if (useRecognizedType(finalType, type)) { + finalType = type; + } + logger.fine("Supplied type: " + suppliedContentType + ", finalType: " + finalType); + } + } + // Finally, if none of the special cases above were applicable (or + // if we were unable to unpack an uploaded file, etc.), we'll just + // create and return a single DataFile: + File newFile = null; + if (tempFile != null) { + newFile = tempFile.toFile(); + } + + // We have already checked that this file does not exceed the individual size limit; + // but if we are processing it as is, as a single file, we need to check if + // its size does not go beyond the allocated storage quota (if specified): + + long fileSize = newFile.length(); + + if (storageQuotaLimit != null && fileSize > storageQuotaLimit) { + try { + tempFile.toFile().delete(); + } catch (Exception ex) { + // ignore - but log a warning + logger.warning("Could not remove temp file " + tempFile.getFileName()); + } + throw new CommandExecutionException(MessageFormat.format(BundleUtil.getStringFromBundle("file.addreplace.error.quota_exceeded"), bytesToHumanReadable(fileSize), bytesToHumanReadable(storageQuotaLimit)), this); + } + + DataFile datafile = FileUtil.createSingleDataFile(version, newFile, newStorageIdentifier, fileName, finalType, newCheckSumType, newCheckSum); + File f = null; + if (tempFile != null) { + f = tempFile.toFile(); + } + if (datafile != null && ((f != null) || (newStorageIdentifier != null))) { + + if (warningMessage != null) { + createIngestFailureReport(datafile, warningMessage); + datafile.SetIngestProblem(); + } + datafiles.add(datafile); + + return CreateDataFileResult.success(fileName, finalType, datafiles); + } + + return CreateDataFileResult.error(fileName, finalType); + } // end 
createDataFiles +} diff --git a/src/main/java/edu/harvard/iq/dataverse/settings/SettingsServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/settings/SettingsServiceBean.java index d84e18d5931..7f44b4c6a0d 100644 --- a/src/main/java/edu/harvard/iq/dataverse/settings/SettingsServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/settings/SettingsServiceBean.java @@ -576,7 +576,15 @@ Whether Harvesting (OAI) service is enabled /** * The URL for the DvWebLoader tool (see github.com/gdcc/dvwebloader for details) */ - WebloaderUrl + WebloaderUrl, + /** + * Enforce storage quotas: + */ + UseStorageQuotas, + /** + * Placeholder storage quota (defines the same quota setting for every user; used to test the concept of a quota). + */ + StorageQuotaSizeInBytes ; diff --git a/src/main/java/edu/harvard/iq/dataverse/util/FileUtil.java b/src/main/java/edu/harvard/iq/dataverse/util/FileUtil.java index c600abfd409..ba24472b314 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/FileUtil.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/FileUtil.java @@ -105,6 +105,7 @@ import edu.harvard.iq.dataverse.dataaccess.DataAccessOption; import edu.harvard.iq.dataverse.dataaccess.StorageIO; +import edu.harvard.iq.dataverse.util.file.FileExceedsStorageQuotaException; import edu.harvard.iq.dataverse.datasetutility.FileSizeChecker; import java.util.Arrays; import org.apache.commons.io.IOUtils; @@ -411,7 +412,7 @@ public static String getUserFriendlyOriginalType(DataFile dataFile) { * Returns a content type string for a FileObject * */ - private static String determineContentType(File fileObject) { + public static String determineContentType(File fileObject) { if (fileObject==null){ return null; } @@ -902,7 +903,7 @@ public static CreateDataFileResult createDataFiles(DatasetVersion version, Input uncompressedIn = new GZIPInputStream(new FileInputStream(tempFile.toFile())); File unZippedTempFile = saveInputStreamInTempFile(uncompressedIn, fileSizeLimit); datafile =
createSingleDataFile(version, unZippedTempFile, finalFileName, MIME_TYPE_UNDETERMINED_DEFAULT, systemConfig.getFileFixityChecksumAlgorithm()); - } catch (IOException | FileExceedsMaxSizeException ioex) { + } catch (IOException | FileExceedsMaxSizeException | FileExceedsStorageQuotaException ioex) { datafile = null; } finally { if (uncompressedIn != null) { @@ -1068,7 +1069,7 @@ public static CreateDataFileResult createDataFiles(DatasetVersion version, Input } datafiles.clear(); - } catch (FileExceedsMaxSizeException femsx) { + } catch (FileExceedsMaxSizeException | FileExceedsStorageQuotaException femsx) { logger.warning("One of the unzipped files exceeds the size limit; resorting to saving the file as is. " + femsx.getMessage()); warningMessage = BundleUtil.getStringFromBundle("file.addreplace.warning.unzip.failed.size", Arrays.asList(FileSizeChecker.bytesToHumanReadable(fileSizeLimit))); datafiles.clear(); @@ -1154,7 +1155,7 @@ public static CreateDataFileResult createDataFiles(DatasetVersion version, Input finalFileInputStream.close(); } - } catch (FileExceedsMaxSizeException femsx) { + } catch (FileExceedsMaxSizeException | FileExceedsStorageQuotaException femsx) { logger.severe("One of the unzipped shape files exceeded the size limit; giving up. 
" + femsx.getMessage()); datafiles.clear(); } @@ -1271,7 +1272,12 @@ public static boolean useRecognizedType(String suppliedContentType, String recog } public static File saveInputStreamInTempFile(InputStream inputStream, Long fileSizeLimit) - throws IOException, FileExceedsMaxSizeException { + throws IOException, FileExceedsMaxSizeException, FileExceedsStorageQuotaException { + return saveInputStreamInTempFile(inputStream, fileSizeLimit, null); + } + + public static File saveInputStreamInTempFile(InputStream inputStream, Long fileSizeLimit, Long storageQuotaLimit) + throws IOException, FileExceedsMaxSizeException, FileExceedsStorageQuotaException { Path tempFile = Files.createTempFile(Paths.get(getFilesTempDirectory()), "tmp", "upload"); if (inputStream != null && tempFile != null) { @@ -1285,6 +1291,11 @@ public static File saveInputStreamInTempFile(InputStream inputStream, Long fileS throw new FileExceedsMaxSizeException (MessageFormat.format(BundleUtil.getStringFromBundle("file.addreplace.error.file_exceeds_limit"), bytesToHumanReadable(fileSize), bytesToHumanReadable(fileSizeLimit))); } + if (storageQuotaLimit != null && fileSize > storageQuotaLimit) { + try {tempFile.toFile().delete();} catch (Exception ex) {} + throw new FileExceedsStorageQuotaException (MessageFormat.format(BundleUtil.getStringFromBundle("file.addreplace.error.file_exceeds_quota"), bytesToHumanReadable(fileSize), bytesToHumanReadable(storageQuotaLimit))); + } + return tempFile.toFile(); } throw new IOException("Failed to save uploaded file."); @@ -1325,7 +1336,6 @@ public static DataFile createSingleDataFile(DatasetVersion version, File tempFil datafile.setPermissionModificationTime(new Timestamp(new Date().getTime())); FileMetadata fmd = new FileMetadata(); - // TODO: add directoryLabel? 
fmd.setLabel(fileName); if (addToDataset) { @@ -1341,13 +1351,13 @@ public static DataFile createSingleDataFile(DatasetVersion version, File tempFil fmd.setDatasetVersion(version); version.getDataset().getFiles().add(datafile); } - if(storageIdentifier==null) { - generateStorageIdentifier(datafile); - if (!tempFile.renameTo(new File(getFilesTempDirectory() + "/" + datafile.getStorageIdentifier()))) { - return null; - } + if (storageIdentifier == null) { + generateStorageIdentifier(datafile); + if (!tempFile.renameTo(new File(getFilesTempDirectory() + "/" + datafile.getStorageIdentifier()))) { + return null; + } } else { - datafile.setStorageIdentifier(storageIdentifier); + datafile.setStorageIdentifier(storageIdentifier); } if ((checksum !=null)&&(!checksum.isEmpty())) { @@ -1372,7 +1382,7 @@ public static DataFile createSingleDataFile(DatasetVersion version, File tempFil Naming convention: getFilesTempDirectory() + "shp_" + "yyyy-MM-dd-hh-mm-ss-SSS" */ - private static File getShapefileUnzipTempDirectory(){ + public static File getShapefileUnzipTempDirectory(){ String tempDirectory = getFilesTempDirectory(); if (tempDirectory == null){ diff --git a/src/main/java/edu/harvard/iq/dataverse/util/SystemConfig.java b/src/main/java/edu/harvard/iq/dataverse/util/SystemConfig.java index c989add6e3d..ac4a3970379 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/SystemConfig.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/SystemConfig.java @@ -892,7 +892,7 @@ public String toString() { } } - + public boolean isPublicInstall(){ boolean saneDefault = false; return settingsService.isTrueForKey(SettingsServiceBean.Key.PublicInstall, saneDefault); @@ -1149,4 +1149,18 @@ public boolean isSignupDisabledForRemoteAuthProvider(String providerId) { return !ret; } + + public boolean isStorageQuotasEnforced() { + return settingsService.isTrueForKey(SettingsServiceBean.Key.UseStorageQuotas, false); + } + + /** + * This method should only be used for testing of the new 
storage quota + * mechanism, temporarily. (it uses the same value as the quota for + * *everybody* regardless of the circumstances, defined as a database + * setting) + */ + public Long getTestStorageQuotaLimit() { + return settingsService.getValueForKeyAsLong(SettingsServiceBean.Key.StorageQuotaSizeInBytes); + } } diff --git a/src/main/java/edu/harvard/iq/dataverse/util/bagit/data/FileUtilWrapper.java b/src/main/java/edu/harvard/iq/dataverse/util/bagit/data/FileUtilWrapper.java index 2bcac04076a..ecb34bdcfb5 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/bagit/data/FileUtilWrapper.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/bagit/data/FileUtilWrapper.java @@ -3,6 +3,7 @@ import edu.harvard.iq.dataverse.DataFile; import edu.harvard.iq.dataverse.DatasetVersion; import edu.harvard.iq.dataverse.datasetutility.FileExceedsMaxSizeException; +import edu.harvard.iq.dataverse.util.file.FileExceedsStorageQuotaException; import edu.harvard.iq.dataverse.util.FileUtil; import java.io.File; @@ -43,7 +44,11 @@ public void deleteFile(Path filePath) { } public File saveInputStreamInTempFile(InputStream inputStream, Long fileSizeLimit) throws IOException, FileExceedsMaxSizeException { - return FileUtil.saveInputStreamInTempFile(inputStream, fileSizeLimit); + try { + return FileUtil.saveInputStreamInTempFile(inputStream, fileSizeLimit); + } catch (FileExceedsStorageQuotaException fesqx) { + return null; + } } public String determineFileType(File file, String fileName) throws IOException { diff --git a/src/main/java/edu/harvard/iq/dataverse/util/file/FileExceedsStorageQuotaException.java b/src/main/java/edu/harvard/iq/dataverse/util/file/FileExceedsStorageQuotaException.java new file mode 100644 index 00000000000..29eeca254f7 --- /dev/null +++ b/src/main/java/edu/harvard/iq/dataverse/util/file/FileExceedsStorageQuotaException.java @@ -0,0 +1,22 @@ +/* + * To change this license header, choose License Headers in Project Properties. 
+ * To change this template file, choose Tools | Templates + * and open the template in the editor. + */ +package edu.harvard.iq.dataverse.util.file; + +/** + * + * @author landreev + */ +public class FileExceedsStorageQuotaException extends Exception { + + public FileExceedsStorageQuotaException(String message) { + super(message); + } + + public FileExceedsStorageQuotaException(String message, Throwable cause) { + super(message, cause); + } + +} diff --git a/src/main/java/propertyFiles/Bundle.properties b/src/main/java/propertyFiles/Bundle.properties index 45807dc7cde..c1fd4ebaf10 100644 --- a/src/main/java/propertyFiles/Bundle.properties +++ b/src/main/java/propertyFiles/Bundle.properties @@ -2145,6 +2145,7 @@ file.message.replaceSuccess=The file has been replaced. file.addreplace.file_size_ok=File size is in range. file.addreplace.error.byte_abrev=B file.addreplace.error.file_exceeds_limit=This file size ({0}) exceeds the size limit of {1}. +file.addreplace.error.quota_exceeded=This file (size {0}) exceeds the remaining storage quota of {1}. file.addreplace.error.dataset_is_null=The dataset cannot be null. file.addreplace.error.dataset_id_is_null=The dataset ID cannot be null. file.addreplace.error.parsing=Error in parsing provided json diff --git a/src/main/webapp/editFilesFragment.xhtml b/src/main/webapp/editFilesFragment.xhtml index a4e635b8c14..99db5abd2dc 100644 --- a/src/main/webapp/editFilesFragment.xhtml +++ b/src/main/webapp/editFilesFragment.xhtml @@ -155,6 +155,7 @@ fileLimit="#{EditDatafilesPage.getMaxNumberOfFiles()}" invalidSizeMessage="#{bundle['file.edit.error.file_exceeds_limit']}" sequential="true" + previewWidth="-1" widgetVar="fileUploadWidget"> From d5fd5e3e690f0b39d630b4774ec2807b2ec08750 Mon Sep 17 00:00:00 2001 From: Leonid Andreev Date: Thu, 23 Feb 2023 18:35:56 -0500 Subject: [PATCH 028/396] switched to the new Create Files command in the remaining places where the utility was used. 
#9361 --- .../iq/dataverse/EditDatafilesPage.java | 18 +++--- .../datadeposit/MediaResourceManagerImpl.java | 58 ++++++++++--------- .../datasetutility/AddReplaceFileHelper.java | 12 ++-- .../harvard/iq/dataverse/util/FileUtil.java | 6 +- 4 files changed, 54 insertions(+), 40 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/EditDatafilesPage.java b/src/main/java/edu/harvard/iq/dataverse/EditDatafilesPage.java index 74c4e782d56..928bf635ffa 100644 --- a/src/main/java/edu/harvard/iq/dataverse/EditDatafilesPage.java +++ b/src/main/java/edu/harvard/iq/dataverse/EditDatafilesPage.java @@ -1508,14 +1508,16 @@ public void handleDropBoxUpload(ActionEvent event) { // for example, multiple files can be extracted from an uncompressed // zip file. //datafiles = ingestService.createDataFiles(workingVersion, dropBoxStream, fileName, "application/octet-stream"); - CreateDataFileResult createDataFilesResult = FileUtil.createDataFiles(workingVersion, dropBoxStream, fileName, "application/octet-stream", null, null, systemConfig); + //CreateDataFileResult createDataFilesResult = FileUtil.createDataFiles(workingVersion, dropBoxStream, fileName, "application/octet-stream", null, null, systemConfig); + Command cmd = new CreateNewDataFilesCommand(dvRequestService.getDataverseRequest(), workingVersion, dropBoxStream, fileName, "application/octet-stream", null, null); + CreateDataFileResult createDataFilesResult = commandEngine.submit(cmd); datafiles = createDataFilesResult.getDataFiles(); Optional.ofNullable(editDataFilesPageHelper.getHtmlErrorMessage(createDataFilesResult)).ifPresent(errorMessage -> errorMessages.add(errorMessage)); - } catch (IOException ex) { + } catch (CommandException ex) { this.logger.log(Level.SEVERE, "Error during ingest of DropBox file {0} from link {1}", new Object[]{fileName, fileLink}); continue; - }/*catch (FileExceedsMaxSizeException ex){ + } /*catch (FileExceedsMaxSizeException ex){ this.logger.log(Level.SEVERE, "Error during ingest of 
DropBox file {0} from link {1}: {2}", new Object[]{fileName, fileLink, ex.getMessage()}); continue; }*/ finally { @@ -2040,8 +2042,7 @@ public void handleFileUpload(FileUploadEvent event) throws IOException { // zip file. ///CreateDataFileResult createDataFilesResult = FileUtil.createDataFiles(workingVersion, uFile.getInputStream(), uFile.getFileName(), uFile.getContentType(), null, null, systemConfig); - Command cmd; - cmd = new CreateNewDataFilesCommand(dvRequestService.getDataverseRequest(), workingVersion, uFile.getInputStream(), uFile.getFileName(), uFile.getContentType(), null, null); + Command cmd = new CreateNewDataFilesCommand(dvRequestService.getDataverseRequest(), workingVersion, uFile.getInputStream(), uFile.getFileName(), uFile.getContentType(), null, null); CreateDataFileResult createDataFilesResult = commandEngine.submit(cmd); @@ -2165,10 +2166,13 @@ public void handleExternalUpload() { // for example, multiple files can be extracted from an uncompressed // zip file. //datafiles = ingestService.createDataFiles(workingVersion, dropBoxStream, fileName, "application/octet-stream"); - CreateDataFileResult createDataFilesResult = FileUtil.createDataFiles(workingVersion, null, fileName, contentType, fullStorageIdentifier, checksumValue, checksumType, systemConfig); + ///CreateDataFileResult createDataFilesResult = FileUtil.createDataFiles(workingVersion, null, fileName, contentType, fullStorageIdentifier, checksumValue, checksumType, systemConfig); + + Command cmd = new CreateNewDataFilesCommand(dvRequestService.getDataverseRequest(), workingVersion, null, fileName, contentType, fullStorageIdentifier, checksumValue, checksumType); + CreateDataFileResult createDataFilesResult = commandEngine.submit(cmd); datafiles = createDataFilesResult.getDataFiles(); Optional.ofNullable(editDataFilesPageHelper.getHtmlErrorMessage(createDataFilesResult)).ifPresent(errorMessage -> errorMessages.add(errorMessage)); - } catch (IOException ex) { + } catch (CommandException 
ex) { logger.log(Level.SEVERE, "Error during ingest of file {0}", new Object[]{fileName}); } diff --git a/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/MediaResourceManagerImpl.java b/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/MediaResourceManagerImpl.java index 5491024c73c..f21a65bdf1e 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/MediaResourceManagerImpl.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/MediaResourceManagerImpl.java @@ -6,14 +6,17 @@ import edu.harvard.iq.dataverse.DatasetServiceBean; import edu.harvard.iq.dataverse.DatasetVersion; import edu.harvard.iq.dataverse.Dataverse; +import edu.harvard.iq.dataverse.DataverseRequestServiceBean; import edu.harvard.iq.dataverse.EjbDataverseEngine; import edu.harvard.iq.dataverse.PermissionServiceBean; import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser; import edu.harvard.iq.dataverse.dataaccess.StorageIO; import edu.harvard.iq.dataverse.datacapturemodule.DataCaptureModuleUtil; import edu.harvard.iq.dataverse.datasetutility.FileExceedsMaxSizeException; +import edu.harvard.iq.dataverse.engine.command.Command; import edu.harvard.iq.dataverse.engine.command.DataverseRequest; import edu.harvard.iq.dataverse.engine.command.exception.CommandException; +import edu.harvard.iq.dataverse.engine.command.impl.CreateNewDataFilesCommand; import edu.harvard.iq.dataverse.engine.command.impl.UpdateDatasetVersionCommand; import edu.harvard.iq.dataverse.ingest.IngestServiceBean; import edu.harvard.iq.dataverse.settings.SettingsServiceBean; @@ -69,6 +72,8 @@ public class MediaResourceManagerImpl implements MediaResourceManager { SwordAuth swordAuth; @Inject UrlManager urlManager; + @Inject + DataverseRequestServiceBean dvRequestService; private HttpServletRequest httpRequest; @@ -301,37 +306,38 @@ DepositReceipt replaceOrAddFiles(String uri, Deposit deposit, AuthCredentials au */ String guessContentTypeForMe = null; List dataFiles = new ArrayList<>(); 
+ try { - try { - CreateDataFileResult createDataFilesResponse = FileUtil.createDataFiles(editVersion, deposit.getInputStream(), uploadedZipFilename, guessContentTypeForMe, null, null, systemConfig); - dataFiles = createDataFilesResponse.getDataFiles(); - } catch (EJBException ex) { - Throwable cause = ex.getCause(); - if (cause != null) { - if (cause instanceof IllegalArgumentException) { - /** - * @todo should be safe to remove this catch of - * EJBException and IllegalArgumentException once - * this ticket is resolved: - * - * IllegalArgumentException: MALFORMED when - * uploading certain zip files - * https://github.com/IQSS/dataverse/issues/1021 - */ - throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "Exception caught calling ingestService.createDataFiles. Problem with zip file, perhaps: " + cause); - } else { - throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "Exception caught calling ingestService.createDataFiles: " + cause); - } + //CreateDataFileResult createDataFilesResponse = FileUtil.createDataFiles(editVersion, deposit.getInputStream(), uploadedZipFilename, guessContentTypeForMe, null, null, systemConfig); + Command cmd = new CreateNewDataFilesCommand(dvRequestService.getDataverseRequest(), editVersion, deposit.getInputStream(), uploadedZipFilename, guessContentTypeForMe, null, null); + CreateDataFileResult createDataFilesResult = commandEngine.submit(cmd); + dataFiles = createDataFilesResult.getDataFiles(); + } catch (CommandException ex) { + Throwable cause = ex.getCause(); + if (cause != null) { + if (cause instanceof IllegalArgumentException) { + /** + * @todo should be safe to remove this catch of + * EJBException and IllegalArgumentException once this + * ticket is resolved: + * + * IllegalArgumentException: MALFORMED when uploading + * certain zip files + * https://github.com/IQSS/dataverse/issues/1021 + */ + throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "Unable to add file(s) to dataset. 
Problem with zip file, perhaps: " + cause); } else { - throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "Exception caught calling ingestService.createDataFiles. No cause: " + ex.getMessage()); + throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "Unable to add file(s) to dataset: " + cause); } - } /*TODO: L.A. 4.6! catch (FileExceedsMaxSizeException ex) { + } else { + throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "Unable to add file(s) to dataset: " + ex.getMessage()); + } + } + /*TODO: L.A. 4.6! catch (FileExceedsMaxSizeException ex) { throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "Exception caught calling ingestService.createDataFiles: " + ex.getMessage()); //Logger.getLogger(MediaResourceManagerImpl.class.getName()).log(Level.SEVERE, null, ex); - }*/ - } catch (IOException ex) { - throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "Unable to add file(s) to dataset: " + ex.getMessage()); - } + }*/ + if (!dataFiles.isEmpty()) { Set constraintViolations = editVersion.validate(); if (constraintViolations.size() > 0) { diff --git a/src/main/java/edu/harvard/iq/dataverse/datasetutility/AddReplaceFileHelper.java b/src/main/java/edu/harvard/iq/dataverse/datasetutility/AddReplaceFileHelper.java index 1d0ec0f19d9..e31f86093ed 100644 --- a/src/main/java/edu/harvard/iq/dataverse/datasetutility/AddReplaceFileHelper.java +++ b/src/main/java/edu/harvard/iq/dataverse/datasetutility/AddReplaceFileHelper.java @@ -63,6 +63,7 @@ import static edu.harvard.iq.dataverse.api.AbstractApiBean.STATUS_ERROR; import static edu.harvard.iq.dataverse.api.AbstractApiBean.STATUS_OK; +import edu.harvard.iq.dataverse.engine.command.impl.CreateNewDataFilesCommand; import static javax.ws.rs.core.Response.Status.BAD_REQUEST; /** @@ -1205,17 +1206,20 @@ private boolean step_030_createNewFilesViaIngest(){ clone = workingVersion.cloneDatasetVersion(); } try { - CreateDataFileResult result = FileUtil.createDataFiles(workingVersion, + /*CreateDataFileResult result = 
FileUtil.createDataFiles(workingVersion, this.newFileInputStream, this.newFileName, this.newFileContentType, this.newStorageIdentifier, this.newCheckSum, this.newCheckSumType, - this.systemConfig); - initialFileList = result.getDataFiles(); + this.systemConfig);*/ + + Command cmd = new CreateNewDataFilesCommand(dvRequest, workingVersion, newFileInputStream, newFileName, newFileContentType, newStorageIdentifier, newCheckSum, newCheckSumType); + CreateDataFileResult createDataFilesResult = commandEngine.submit(cmd); + initialFileList = createDataFilesResult.getDataFiles(); - } catch (IOException ex) { + } catch (CommandException ex) { if (!Strings.isNullOrEmpty(ex.getMessage())) { this.addErrorSevere(getBundleErr("ingest_create_file_err") + " " + ex.getMessage()); } else { diff --git a/src/main/java/edu/harvard/iq/dataverse/util/FileUtil.java b/src/main/java/edu/harvard/iq/dataverse/util/FileUtil.java index ba24472b314..0c099242849 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/FileUtil.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/FileUtil.java @@ -805,7 +805,7 @@ public static String generateOriginalExtension(String fileType) { return ""; } - public static CreateDataFileResult createDataFiles(DatasetVersion version, InputStream inputStream, + /*public static CreateDataFileResult createDataFiles(DatasetVersion version, InputStream inputStream, String fileName, String suppliedContentType, String newStorageIdentifier, String newCheckSum, SystemConfig systemConfig) throws IOException { ChecksumType checkSumType = DataFile.ChecksumType.MD5; @@ -813,7 +813,7 @@ public static CreateDataFileResult createDataFiles(DatasetVersion version, Input checkSumType = systemConfig.getFileFixityChecksumAlgorithm(); } return createDataFiles(version, inputStream, fileName, suppliedContentType, newStorageIdentifier, newCheckSum, checkSumType, systemConfig); - } + }*/ public static CreateDataFileResult createDataFiles(DatasetVersion version, InputStream inputStream, 
String fileName, String suppliedContentType, String newStorageIdentifier, String newCheckSum, ChecksumType newCheckSumType, SystemConfig systemConfig) throws IOException { List datafiles = new ArrayList<>(); @@ -1293,7 +1293,7 @@ public static File saveInputStreamInTempFile(InputStream inputStream, Long fileS if (storageQuotaLimit != null && fileSize > storageQuotaLimit) { try {tempFile.toFile().delete();} catch (Exception ex) {} - throw new FileExceedsStorageQuotaException (MessageFormat.format(BundleUtil.getStringFromBundle("file.addreplace.error.file_exceeds_quota"), bytesToHumanReadable(fileSize), bytesToHumanReadable(storageQuotaLimit))); + throw new FileExceedsStorageQuotaException (MessageFormat.format(BundleUtil.getStringFromBundle("file.addreplace.error.quota_exceeded"), bytesToHumanReadable(fileSize), bytesToHumanReadable(storageQuotaLimit))); } return tempFile.toFile(); From 6210c3435ff7df308a6491c5b9a0b0b23d758774 Mon Sep 17 00:00:00 2001 From: Leonid Andreev Date: Thu, 23 Feb 2023 18:43:23 -0500 Subject: [PATCH 029/396] removed the static utility methods that have been turned into a command. 
#9361 --- .../harvard/iq/dataverse/util/FileUtil.java | 430 ------------------ 1 file changed, 430 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/util/FileUtil.java b/src/main/java/edu/harvard/iq/dataverse/util/FileUtil.java index 0c099242849..014f44c5c33 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/FileUtil.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/FileUtil.java @@ -804,436 +804,6 @@ public static String generateOriginalExtension(String fileType) { } return ""; } - - /*public static CreateDataFileResult createDataFiles(DatasetVersion version, InputStream inputStream, - String fileName, String suppliedContentType, String newStorageIdentifier, String newCheckSum, - SystemConfig systemConfig) throws IOException { - ChecksumType checkSumType = DataFile.ChecksumType.MD5; - if (newStorageIdentifier == null) { - checkSumType = systemConfig.getFileFixityChecksumAlgorithm(); - } - return createDataFiles(version, inputStream, fileName, suppliedContentType, newStorageIdentifier, newCheckSum, checkSumType, systemConfig); - }*/ - - public static CreateDataFileResult createDataFiles(DatasetVersion version, InputStream inputStream, String fileName, String suppliedContentType, String newStorageIdentifier, String newCheckSum, ChecksumType newCheckSumType, SystemConfig systemConfig) throws IOException { - List datafiles = new ArrayList<>(); - - //When there is no checksum/checksumtype being sent (normal upload, needs to be calculated), set the type to the current default - if(newCheckSumType == null) { - newCheckSumType = systemConfig.getFileFixityChecksumAlgorithm(); - } - - String warningMessage = null; - - // save the file, in the temporary location for now: - Path tempFile = null; - - Long fileSizeLimit = systemConfig.getMaxFileUploadSizeForStore(version.getDataset().getEffectiveStorageDriverId()); - String finalType = null; - if (newStorageIdentifier == null) { - if (getFilesTempDirectory() != null) { - tempFile = 
Files.createTempFile(Paths.get(getFilesTempDirectory()), "tmp", "upload"); - // "temporary" location is the key here; this is why we are not using - // the DataStore framework for this - the assumption is that - // temp files will always be stored on the local filesystem. - // -- L.A. Jul. 2014 - logger.fine("Will attempt to save the file as: " + tempFile.toString()); - Files.copy(inputStream, tempFile, StandardCopyOption.REPLACE_EXISTING); - - // A file size check, before we do anything else: - // (note that "no size limit set" = "unlimited") - // (also note, that if this is a zip file, we'll be checking - // the size limit for each of the individual unpacked files) - Long fileSize = tempFile.toFile().length(); - if (fileSizeLimit != null && fileSize > fileSizeLimit) { - try { - tempFile.toFile().delete(); - } catch (Exception ex) { - } - throw new IOException(MessageFormat.format(BundleUtil.getStringFromBundle("file.addreplace.error.file_exceeds_limit"), bytesToHumanReadable(fileSize), bytesToHumanReadable(fileSizeLimit))); - } - - } else { - throw new IOException("Temp directory is not configured."); - } - logger.fine("mime type supplied: " + suppliedContentType); - // Let's try our own utilities (Jhove, etc.) to determine the file type - // of the uploaded file. (We may already have a mime type supplied for this - // file - maybe the type that the browser recognized on upload; or, if - // it's a harvest, maybe the remote server has already given us the type - // for this file... with our own type utility we may or may not do better - // than the type supplied: - // -- L.A. 
- String recognizedType = null; - - try { - recognizedType = determineFileType(tempFile.toFile(), fileName); - logger.fine("File utility recognized the file as " + recognizedType); - if (recognizedType != null && !recognizedType.equals("")) { - if (useRecognizedType(suppliedContentType, recognizedType)) { - finalType = recognizedType; - } - } - - } catch (Exception ex) { - logger.warning("Failed to run the file utility mime type check on file " + fileName); - } - - if (finalType == null) { - finalType = (suppliedContentType == null || suppliedContentType.equals("")) - ? MIME_TYPE_UNDETERMINED_DEFAULT - : suppliedContentType; - } - - // A few special cases: - // if this is a gzipped FITS file, we'll uncompress it, and ingest it as - // a regular FITS file: - if (finalType.equals("application/fits-gzipped")) { - - InputStream uncompressedIn = null; - String finalFileName = fileName; - // if the file name had the ".gz" extension, remove it, - // since we are going to uncompress it: - if (fileName != null && fileName.matches(".*\\.gz$")) { - finalFileName = fileName.replaceAll("\\.gz$", ""); - } - - DataFile datafile = null; - try { - uncompressedIn = new GZIPInputStream(new FileInputStream(tempFile.toFile())); - File unZippedTempFile = saveInputStreamInTempFile(uncompressedIn, fileSizeLimit); - datafile = createSingleDataFile(version, unZippedTempFile, finalFileName, MIME_TYPE_UNDETERMINED_DEFAULT, systemConfig.getFileFixityChecksumAlgorithm()); - } catch (IOException | FileExceedsMaxSizeException | FileExceedsStorageQuotaException ioex) { - datafile = null; - } finally { - if (uncompressedIn != null) { - try { - uncompressedIn.close(); - } catch (IOException e) { - } - } - } - - // If we were able to produce an uncompressed file, we'll use it - // to create and return a final DataFile; if not, we're not going - // to do anything - and then a new DataFile will be created further - // down, from the original, uncompressed file. 
- if (datafile != null) { - // remove the compressed temp file: - try { - tempFile.toFile().delete(); - } catch (SecurityException ex) { - // (this is very non-fatal) - logger.warning("Failed to delete temporary file " + tempFile.toString()); - } - - datafiles.add(datafile); - return CreateDataFileResult.success(fileName, finalType, datafiles); - } - - // If it's a ZIP file, we are going to unpack it and create multiple - // DataFile objects from its contents: - } else if (finalType.equals("application/zip")) { - - ZipInputStream unZippedIn = null; - ZipEntry zipEntry = null; - - int fileNumberLimit = systemConfig.getZipUploadFilesLimit(); - - try { - Charset charset = null; - /* - TODO: (?) - We may want to investigate somehow letting the user specify - the charset for the filenames in the zip file... - - otherwise, ZipInputStream bails out if it encounteres a file - name that's not valid in the current charest (i.e., UTF-8, in - our case). It would be a bit trickier than what we're doing for - SPSS tabular ingests - with the lang. encoding pulldown menu - - because this encoding needs to be specified *before* we upload and - attempt to unzip the file. - -- L.A. 4.0 beta12 - logger.info("default charset is "+Charset.defaultCharset().name()); - if (Charset.isSupported("US-ASCII")) { - logger.info("charset US-ASCII is supported."); - charset = Charset.forName("US-ASCII"); - if (charset != null) { - logger.info("was able to obtain charset for US-ASCII"); - } - - } - */ - - if (charset != null) { - unZippedIn = new ZipInputStream(new FileInputStream(tempFile.toFile()), charset); - } else { - unZippedIn = new ZipInputStream(new FileInputStream(tempFile.toFile())); - } - - while (true) { - try { - zipEntry = unZippedIn.getNextEntry(); - } catch (IllegalArgumentException iaex) { - // Note: - // ZipInputStream documentation doesn't even mention that - // getNextEntry() throws an IllegalArgumentException! 
- // but that's what happens if the file name of the next - // entry is not valid in the current CharSet. - // -- L.A. - warningMessage = "Failed to unpack Zip file. (Unknown Character Set used in a file name?) Saving the file as is."; - logger.warning(warningMessage); - throw new IOException(); - } - - if (zipEntry == null) { - break; - } - // Note that some zip entries may be directories - we - // simply skip them: - - if (!zipEntry.isDirectory()) { - if (datafiles.size() > fileNumberLimit) { - logger.warning("Zip upload - too many files."); - warningMessage = "The number of files in the zip archive is over the limit (" + fileNumberLimit - + "); please upload a zip archive with fewer files, if you want them to be ingested " - + "as individual DataFiles."; - throw new IOException(); - } - - String fileEntryName = zipEntry.getName(); - logger.fine("ZipEntry, file: " + fileEntryName); - - if (fileEntryName != null && !fileEntryName.equals("")) { - - String shortName = fileEntryName.replaceFirst("^.*[\\/]", ""); - - // Check if it's a "fake" file - a zip archive entry - // created for a MacOS X filesystem element: (these - // start with "._") - if (!shortName.startsWith("._") && !shortName.startsWith(".DS_Store") && !"".equals(shortName)) { - // OK, this seems like an OK file entry - we'll try - // to read it and create a DataFile with it: - - File unZippedTempFile = saveInputStreamInTempFile(unZippedIn, fileSizeLimit); - DataFile datafile = createSingleDataFile(version, unZippedTempFile, null, shortName, - MIME_TYPE_UNDETERMINED_DEFAULT, - systemConfig.getFileFixityChecksumAlgorithm(), null, false); - - if (!fileEntryName.equals(shortName)) { - // If the filename looks like a hierarchical folder name (i.e., contains slashes and backslashes), - // we'll extract the directory name; then subject it to some "aggressive sanitizing" - strip all - // the leading, trailing and duplicate slashes; then replace all the characters that - // don't pass our validation rules. 
- String directoryName = fileEntryName.replaceFirst("[\\\\/][\\\\/]*[^\\\\/]*$", ""); - directoryName = StringUtil.sanitizeFileDirectory(directoryName, true); - // if (!"".equals(directoryName)) { - if (!StringUtil.isEmpty(directoryName)) { - logger.fine("setting the directory label to " + directoryName); - datafile.getFileMetadata().setDirectoryLabel(directoryName); - } - } - - if (datafile != null) { - // We have created this datafile with the mime type "unknown"; - // Now that we have it saved in a temporary location, - // let's try and determine its real type: - - String tempFileName = getFilesTempDirectory() + "/" + datafile.getStorageIdentifier(); - - try { - recognizedType = determineFileType(new File(tempFileName), shortName); - logger.fine("File utility recognized unzipped file as " + recognizedType); - if (recognizedType != null && !recognizedType.equals("")) { - datafile.setContentType(recognizedType); - } - } catch (Exception ex) { - logger.warning("Failed to run the file utility mime type check on file " + fileName); - } - - datafiles.add(datafile); - } - } - } - } - unZippedIn.closeEntry(); - - } - - } catch (IOException ioex) { - // just clear the datafiles list and let - // ingest default to creating a single DataFile out - // of the unzipped file. - logger.warning("Unzipping failed; rolling back to saving the file as is."); - if (warningMessage == null) { - warningMessage = BundleUtil.getStringFromBundle("file.addreplace.warning.unzip.failed"); - } - - datafiles.clear(); - } catch (FileExceedsMaxSizeException | FileExceedsStorageQuotaException femsx) { - logger.warning("One of the unzipped files exceeds the size limit; resorting to saving the file as is. 
" + femsx.getMessage()); - warningMessage = BundleUtil.getStringFromBundle("file.addreplace.warning.unzip.failed.size", Arrays.asList(FileSizeChecker.bytesToHumanReadable(fileSizeLimit))); - datafiles.clear(); - } finally { - if (unZippedIn != null) { - try { - unZippedIn.close(); - } catch (Exception zEx) { - } - } - } - if (datafiles.size() > 0) { - // link the data files to the dataset/version: - // (except we no longer want to do this! -- 4.6) - /*Iterator itf = datafiles.iterator(); - while (itf.hasNext()) { - DataFile datafile = itf.next(); - datafile.setOwner(version.getDataset()); - if (version.getFileMetadatas() == null) { - version.setFileMetadatas(new ArrayList()); - } - version.getFileMetadatas().add(datafile.getFileMetadata()); - datafile.getFileMetadata().setDatasetVersion(version); - - version.getDataset().getFiles().add(datafile); - } */ - // remove the uploaded zip file: - try { - Files.delete(tempFile); - } catch (IOException ioex) { - // do nothing - it's just a temp file. 
- logger.warning("Could not remove temp file " + tempFile.getFileName().toString()); - } - // and return: - return CreateDataFileResult.success(fileName, finalType, datafiles); - } - - } else if (finalType.equalsIgnoreCase(ShapefileHandler.SHAPEFILE_FILE_TYPE)) { - // Shape files may have to be split into multiple files, - // one zip archive per each complete set of shape files: - - // File rezipFolder = new File(this.getFilesTempDirectory()); - File rezipFolder = getShapefileUnzipTempDirectory(); - - IngestServiceShapefileHelper shpIngestHelper; - shpIngestHelper = new IngestServiceShapefileHelper(tempFile.toFile(), rezipFolder); - - boolean didProcessWork = shpIngestHelper.processFile(); - if (!(didProcessWork)) { - logger.severe("Processing of zipped shapefile failed."); - return CreateDataFileResult.error(fileName, finalType); - } - - try { - for (File finalFile : shpIngestHelper.getFinalRezippedFiles()) { - FileInputStream finalFileInputStream = new FileInputStream(finalFile); - finalType = determineContentType(finalFile); - if (finalType == null) { - logger.warning("Content type is null; but should default to 'MIME_TYPE_UNDETERMINED_DEFAULT'"); - continue; - } - - File unZippedShapeTempFile = saveInputStreamInTempFile(finalFileInputStream, fileSizeLimit); - DataFile new_datafile = createSingleDataFile(version, unZippedShapeTempFile, finalFile.getName(), finalType, systemConfig.getFileFixityChecksumAlgorithm()); - String directoryName = null; - String absolutePathName = finalFile.getParent(); - if (absolutePathName != null) { - if (absolutePathName.length() > rezipFolder.toString().length()) { - // This file lives in a subfolder - we want to - // preserve it in the FileMetadata: - directoryName = absolutePathName.substring(rezipFolder.toString().length() + 1); - - if (!StringUtil.isEmpty(directoryName)) { - new_datafile.getFileMetadata().setDirectoryLabel(directoryName); - } - } - } - if (new_datafile != null) { - datafiles.add(new_datafile); - } else { - 
logger.severe("Could not add part of rezipped shapefile. new_datafile was null: " + finalFile.getName()); - } - finalFileInputStream.close(); - - } - } catch (FileExceedsMaxSizeException | FileExceedsStorageQuotaException femsx) { - logger.severe("One of the unzipped shape files exceeded the size limit; giving up. " + femsx.getMessage()); - datafiles.clear(); - } - - // Delete the temp directory used for unzipping - // The try-catch is due to error encountered in using NFS for stocking file, - // cf. https://github.com/IQSS/dataverse/issues/5909 - try { - FileUtils.deleteDirectory(rezipFolder); - } catch (IOException ioex) { - // do nothing - it's a tempo folder. - logger.warning("Could not remove temp folder, error message : " + ioex.getMessage()); - } - - if (datafiles.size() > 0) { - // remove the uploaded zip file: - try { - Files.delete(tempFile); - } catch (IOException ioex) { - // do nothing - it's just a temp file. - logger.warning("Could not remove temp file " + tempFile.getFileName().toString()); - } catch (SecurityException se) { - logger.warning("Unable to delete: " + tempFile.toString() + "due to Security Exception: " - + se.getMessage()); - } - return CreateDataFileResult.success(fileName, finalType, datafiles); - } else { - logger.severe("No files added from directory of rezipped shapefiles"); - } - return CreateDataFileResult.error(fileName, finalType); - - } else if (finalType.equalsIgnoreCase(BagItFileHandler.FILE_TYPE)) { - Optional bagItFileHandler = CDI.current().select(BagItFileHandlerFactory.class).get().getBagItFileHandler(); - if (bagItFileHandler.isPresent()) { - CreateDataFileResult result = bagItFileHandler.get().handleBagItPackage(systemConfig, version, fileName, tempFile.toFile()); - return result; - } - } - } else { - // Default to suppliedContentType if set or the overall undetermined default if a contenttype isn't supplied - finalType = StringUtils.isBlank(suppliedContentType) ? 
FileUtil.MIME_TYPE_UNDETERMINED_DEFAULT : suppliedContentType; - String type = determineFileTypeByNameAndExtension(fileName); - if (!StringUtils.isBlank(type)) { - //Use rules for deciding when to trust browser supplied type - if (useRecognizedType(finalType, type)) { - finalType = type; - } - logger.fine("Supplied type: " + suppliedContentType + ", finalType: " + finalType); - } - } - // Finally, if none of the special cases above were applicable (or - // if we were unable to unpack an uploaded file, etc.), we'll just - // create and return a single DataFile: - File newFile = null; - if (tempFile != null) { - newFile = tempFile.toFile(); - } - - - DataFile datafile = createSingleDataFile(version, newFile, newStorageIdentifier, fileName, finalType, newCheckSumType, newCheckSum); - File f = null; - if (tempFile != null) { - f = tempFile.toFile(); - } - if (datafile != null && ((f != null) || (newStorageIdentifier != null))) { - - if (warningMessage != null) { - createIngestFailureReport(datafile, warningMessage); - datafile.SetIngestProblem(); - } - datafiles.add(datafile); - - return CreateDataFileResult.success(fileName, finalType, datafiles); - } - - return CreateDataFileResult.error(fileName, finalType); - } // end createDataFiles - public static boolean useRecognizedType(String suppliedContentType, String recognizedType) { // is it any better than the type that was supplied to us, From 1a22b11c65353f7c2bd0677b2f4bb2e134aebcb5 Mon Sep 17 00:00:00 2001 From: Leonid Andreev Date: Fri, 24 Feb 2023 17:02:39 -0500 Subject: [PATCH 030/396] Added info messages about the remaining storage quota, if enforced, for the user on the upload page. 
#9361 --- .../iq/dataverse/EditDatafilesPage.java | 20 +++++++++++++++---- src/main/java/propertyFiles/Bundle.properties | 3 ++- src/main/webapp/editFilesFragment.xhtml | 5 +++++ 3 files changed, 23 insertions(+), 5 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/EditDatafilesPage.java b/src/main/java/edu/harvard/iq/dataverse/EditDatafilesPage.java index 928bf635ffa..420642f2fa5 100644 --- a/src/main/java/edu/harvard/iq/dataverse/EditDatafilesPage.java +++ b/src/main/java/edu/harvard/iq/dataverse/EditDatafilesPage.java @@ -343,6 +343,11 @@ public Long getMaxFileUploadSizeInBytes() { public String getHumanMaxFileUploadSizeInBytes() { return FileSizeChecker.bytesToHumanReadable(this.maxFileUploadSizeInBytes); } + + public boolean isUnlimitedUploadFileSize() { + + return this.maxFileUploadSizeInBytes == null; + } public Long getMaxTotalUploadSizeInBytes() { return maxTotalUploadSizeInBytes; @@ -351,10 +356,9 @@ public Long getMaxTotalUploadSizeInBytes() { public String getHumanMaxTotalUploadSizeInBytes() { return FileSizeChecker.bytesToHumanReadable(maxTotalUploadSizeInBytes); } - - public boolean isUnlimitedUploadFileSize() { - - return this.maxFileUploadSizeInBytes == null; + + public boolean isStorageQuotaEnforced() { + return maxTotalUploadSizeInBytes != null; } public Long getMaxIngestSizeInBytes() { @@ -524,6 +528,11 @@ public String initCreateMode(String modeToken, DatasetVersion version, MutableBo selectedFiles = selectedFileMetadatasList; this.maxFileUploadSizeInBytes = systemConfig.getMaxFileUploadSizeForStore(dataset.getEffectiveStorageDriverId()); + if (systemConfig.isStorageQuotasEnforced()) { + this.maxTotalUploadSizeInBytes = datafileService.getUserStorageQuota((AuthenticatedUser) session.getUser(), dataset).getRemainingQuotaInBytes(); + } else { + this.maxTotalUploadSizeInBytes = null; + } this.maxIngestSizeInBytes = systemConfig.getTabularIngestSizeLimit(); this.humanPerFormatTabularLimits = populateHumanPerFormatTabularLimits(); 
this.multipleUploadFilesLimit = systemConfig.getMultipleUploadFilesLimit(); @@ -575,6 +584,9 @@ public String init() { clone = workingVersion.cloneDatasetVersion(); this.maxFileUploadSizeInBytes = systemConfig.getMaxFileUploadSizeForStore(dataset.getEffectiveStorageDriverId()); + if (systemConfig.isStorageQuotasEnforced()) { + this.maxTotalUploadSizeInBytes = datafileService.getUserStorageQuota((AuthenticatedUser) session.getUser(), dataset).getRemainingQuotaInBytes(); + } this.maxIngestSizeInBytes = systemConfig.getTabularIngestSizeLimit(); this.humanPerFormatTabularLimits = populateHumanPerFormatTabularLimits(); this.multipleUploadFilesLimit = systemConfig.getMultipleUploadFilesLimit(); diff --git a/src/main/java/propertyFiles/Bundle.properties b/src/main/java/propertyFiles/Bundle.properties index c1fd4ebaf10..dd9b398b709 100644 --- a/src/main/java/propertyFiles/Bundle.properties +++ b/src/main/java/propertyFiles/Bundle.properties @@ -1659,7 +1659,8 @@ file.select.tooltip=Select Files file.selectAllFiles=Select all {0} files in this dataset. file.dynamicCounter.filesPerPage=Files Per Page file.selectToAddBtn=Select Files to Add -file.selectToAdd.tipLimit=File upload limit is {0} per file. +file.selectToAdd.tipLimit=File upload limit is {0} per file. +file.selectToAdd.tipQuotaRemaining=Storage quota: {0} remaining. file.selectToAdd.tipMaxNumFiles=Maximum of {0} {0, choice, 0#files|1#file|2#files} per upload. file.selectToAdd.tipTabularLimit=Tabular file ingest is limited to {2}. file.selectToAdd.tipPerFileTabularLimit=Ingest is limited to the following file sizes based on their format: {0}. 
diff --git a/src/main/webapp/editFilesFragment.xhtml b/src/main/webapp/editFilesFragment.xhtml index 99db5abd2dc..77f7aab2f76 100644 --- a/src/main/webapp/editFilesFragment.xhtml +++ b/src/main/webapp/editFilesFragment.xhtml @@ -80,6 +80,11 @@ rendered="#{!EditDatafilesPage.isUnlimitedUploadFileSize()}"> + + + + From 10a475317771227b23263170ad3c97232764d14d Mon Sep 17 00:00:00 2001 From: Leonid Andreev Date: Mon, 20 Mar 2023 10:48:20 -0400 Subject: [PATCH 031/396] An extra check, to disable the upload component right away, if the quota is already full/exceeded. #9361 --- src/main/java/edu/harvard/iq/dataverse/EditDatafilesPage.java | 4 ++++ src/main/webapp/editFilesFragment.xhtml | 2 +- 2 files changed, 5 insertions(+), 1 deletion(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/EditDatafilesPage.java b/src/main/java/edu/harvard/iq/dataverse/EditDatafilesPage.java index 420642f2fa5..c39e6f62ce2 100644 --- a/src/main/java/edu/harvard/iq/dataverse/EditDatafilesPage.java +++ b/src/main/java/edu/harvard/iq/dataverse/EditDatafilesPage.java @@ -542,6 +542,10 @@ public String initCreateMode(String modeToken, DatasetVersion version, MutableBo saveEnabled = true; return null; } + + public boolean isQuotaExceeded() { + return systemConfig.isStorageQuotasEnforced() && datafileService.getUserStorageQuota((AuthenticatedUser) session.getUser(), dataset).getRemainingQuotaInBytes() == 0; + } public String init() { // default mode should be EDIT diff --git a/src/main/webapp/editFilesFragment.xhtml b/src/main/webapp/editFilesFragment.xhtml index 77f7aab2f76..834ca597892 100644 --- a/src/main/webapp/editFilesFragment.xhtml +++ b/src/main/webapp/editFilesFragment.xhtml @@ -148,7 +148,7 @@ dragDropSupport="true" auto="#{!(systemConfig.directUploadEnabled(EditDatafilesPage.dataset))}" multiple="#{datasetPage || EditDatafilesPage.allowMultipleFileUpload()}" - disabled="#{lockedFromEdits || !(datasetPage || EditDatafilesPage.showFileUploadComponent()) }" + 
disabled="#{lockedFromEdits || !(datasetPage || EditDatafilesPage.showFileUploadComponent()) || EditDatafilesPage.isQuotaExceeded()}" listener="#{EditDatafilesPage.handleFileUpload}" process="filesTable" update=":datasetForm:filesTable, @([id$=filesButtons])" From 098de49c8ac14478ad01d9aaa2f820e3d9ab744d Mon Sep 17 00:00:00 2001 From: Eryk Kulikowski Date: Fri, 21 Apr 2023 16:10:41 +0200 Subject: [PATCH 032/396] reverted SystemConfig.java changes --- .../iq/dataverse/util/SystemConfig.java | 299 ++++++++++-------- 1 file changed, 160 insertions(+), 139 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/util/SystemConfig.java b/src/main/java/edu/harvard/iq/dataverse/util/SystemConfig.java index 1764abf4478..c989add6e3d 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/SystemConfig.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/SystemConfig.java @@ -73,7 +73,7 @@ public class SystemConfig { * The default number of datafiles that we allow to be created through * zip file upload. 
*/ - private static final int defaultZipUploadFilesLimit = 1000; + private static final int defaultZipUploadFilesLimit = 1000; public static final long defaultZipDownloadLimit = 104857600L; // 100MB private static final int defaultMultipleUploadFilesLimit = 1000; private static final int defaultLoginSessionTimeout = 480; // = 8 hours @@ -81,21 +81,21 @@ public class SystemConfig { private String buildNumber = null; private static final String JVM_TIMER_SERVER_OPTION = "dataverse.timerServer"; - - private static final long DEFAULT_GUESTBOOK_RESPONSES_DISPLAY_LIMIT = 5000L; + + private static final long DEFAULT_GUESTBOOK_RESPONSES_DISPLAY_LIMIT = 5000L; private static final long DEFAULT_THUMBNAIL_SIZE_LIMIT_IMAGE = 3000000L; // 3 MB private static final long DEFAULT_THUMBNAIL_SIZE_LIMIT_PDF = 1000000L; // 1 MB - + public final static String DEFAULTCURATIONLABELSET = "DEFAULT"; public final static String CURATIONLABELSDISABLED = "DISABLED"; - + public String getVersion() { return getVersion(false); } - + // The return value is a "prviate static String", that should be initialized - // once, on the first call (see the code below)... But this is a @Stateless - // bean... so that would mean "once per thread"? - this would be a prime + // once, on the first call (see the code below)... But this is a @Stateless + // bean... so that would mean "once per thread"? - this would be a prime // candidate for being moved into some kind of an application-scoped caching // service... some CachingService @Singleton - ? (L.A. 
5.8) public String getVersion(boolean withBuildNumber) { @@ -157,15 +157,15 @@ public String getSolrHostColonPort() { public boolean isProvCollectionEnabled() { String provCollectionEnabled = settingsService.getValueForKey(SettingsServiceBean.Key.ProvCollectionEnabled, null); - if ("true".equalsIgnoreCase(provCollectionEnabled)) { + if("true".equalsIgnoreCase(provCollectionEnabled)){ return true; } return false; } - + public int getMetricsCacheTimeoutMinutes() { - int defaultValue = 10080; // one week in minutes + int defaultValue = 10080; //one week in minutes SettingsServiceBean.Key key = SettingsServiceBean.Key.MetricsCacheTimeoutMinutes; String metricsCacheTimeString = settingsService.getValueForKey(key); if (metricsCacheTimeString != null) { @@ -183,7 +183,7 @@ public int getMetricsCacheTimeoutMinutes() { } return defaultValue; } - + public int getMinutesUntilConfirmEmailTokenExpires() { final int minutesInOneDay = 1440; final int reasonableDefault = minutesInOneDay; @@ -201,9 +201,34 @@ public int getMinutesUntilConfirmEmailTokenExpires() { logger.info("Returning " + reasonableDefault + " for " + key + " because value must be an integer greater than zero, not \"" + valueFromDatabase + "\"."); } } + logger.fine("Returning " + reasonableDefault + " for " + key); return reasonableDefault; } + /** + * The number of minutes for which a password reset token is valid. Can be + * overridden by {@link #PASSWORD_RESET_TIMEOUT_IN_MINUTES}. 
+ */ + public static int getMinutesUntilPasswordResetTokenExpires() { + final int reasonableDefault = 60; + String configuredValueAsString = System.getProperty(PASSWORD_RESET_TIMEOUT_IN_MINUTES); + if (configuredValueAsString != null) { + int configuredValueAsInteger = 0; + try { + configuredValueAsInteger = Integer.parseInt(configuredValueAsString); + if (configuredValueAsInteger > 0) { + return configuredValueAsInteger; + } else { + logger.info(PASSWORD_RESET_TIMEOUT_IN_MINUTES + " is configured as a negative number \"" + configuredValueAsInteger + "\". Using default value instead: " + reasonableDefault); + return reasonableDefault; + } + } catch (NumberFormatException ex) { + logger.info("Unable to convert " + PASSWORD_RESET_TIMEOUT_IN_MINUTES + " from \"" + configuredValueAsString + "\" into an integer value: " + ex + ". Using default value " + reasonableDefault); + } + } + return reasonableDefault; + } + /** * Lookup (or construct) the designated URL of this instance from configuration. * @@ -226,6 +251,7 @@ public String getDataverseSiteUrl() { * * Can be defined as a complete URL via dataverse.siteUrl; or derived from the hostname * dataverse.fqdn and HTTPS. If none of these options is set, defaults to the + * {@link InetAddress#getLocalHost} and HTTPS. * * NOTE: This method does not provide any validation. 
* TODO: The behaviour of this method is subject to a later change, see @@ -257,13 +283,12 @@ public static String getDataverseSiteUrlStatic() { return null; } } - + /** - * URL Tracking: + * URL Tracking: */ public String getPageURLWithQueryString() { - return PrettyContext.getCurrentInstance().getRequestURL().toURL() - + PrettyContext.getCurrentInstance().getRequestQueryString().toQueryString(); + return PrettyContext.getCurrentInstance().getRequestURL().toURL() + PrettyContext.getCurrentInstance().getRequestQueryString().toQueryString(); } public String getGuidesBaseUrl() { @@ -322,44 +347,44 @@ public static int getIntLimitFromStringOrDefault(String limitSetting, Integer de /** * Download-as-zip size limit. - * returns defaultZipDownloadLimit if not specified; - * set to -1 to disable zip downloads. + * returns defaultZipDownloadLimit if not specified; + * set to -1 to disable zip downloads. */ public long getZipDownloadLimit() { String zipLimitOption = settingsService.getValueForKey(SettingsServiceBean.Key.ZipDownloadLimit); return getLongLimitFromStringOrDefault(zipLimitOption, defaultZipDownloadLimit); } - + public int getZipUploadFilesLimit() { String limitOption = settingsService.getValueForKey(SettingsServiceBean.Key.ZipUploadFilesLimit); return getIntLimitFromStringOrDefault(limitOption, defaultZipUploadFilesLimit); } - + /** - * Session timeout, in minutes. + * Session timeout, in minutes. 
* (default value provided) */ public int getLoginSessionTimeout() { return getIntLimitFromStringOrDefault( - settingsService.getValueForKey(SettingsServiceBean.Key.LoginSessionTimeout), - defaultLoginSessionTimeout); + settingsService.getValueForKey(SettingsServiceBean.Key.LoginSessionTimeout), + defaultLoginSessionTimeout); } - + /* ` the number of files the GUI user is allowed to upload in one batch, via drag-and-drop, or through the file select dialog - */ + */ public int getMultipleUploadFilesLimit() { String limitOption = settingsService.getValueForKey(SettingsServiceBean.Key.MultipleUploadFilesLimit); return getIntLimitFromStringOrDefault(limitOption, defaultMultipleUploadFilesLimit); } - + public long getGuestbookResponsesPageDisplayLimit() { String limitSetting = settingsService.getValueForKey(SettingsServiceBean.Key.GuestbookResponsesPageDisplayLimit); return getLongLimitFromStringOrDefault(limitSetting, DEFAULT_GUESTBOOK_RESPONSES_DISPLAY_LIMIT); } - - public long getUploadLogoSizeLimit() { + + public long getUploadLogoSizeLimit(){ return 500000; } @@ -372,10 +397,10 @@ public long getThumbnailSizeLimitPDF() { } public static long getThumbnailSizeLimit(String type) { - String option = null; - - // get options via jvm options - + String option = null; + + //get options via jvm options + if ("Image".equals(type)) { option = System.getProperty("dataverse.dataAccess.thumbnail.image.limit"); return getLongLimitFromStringOrDefault(option, DEFAULT_THUMBNAIL_SIZE_LIMIT_IMAGE); @@ -387,19 +412,19 @@ public static long getThumbnailSizeLimit(String type) { // Zero (0) means no limit. 
return getLongLimitFromStringOrDefault(option, 0L); } - + public boolean isThumbnailGenerationDisabledForType(String type) { return getThumbnailSizeLimit(type) == -1l; } - + public boolean isThumbnailGenerationDisabledForImages() { return isThumbnailGenerationDisabledForType("Image"); } - + public boolean isThumbnailGenerationDisabledForPDF() { return isThumbnailGenerationDisabledForType("PDF"); } - + public String getApplicationTermsOfUse() { String language = BundleUtil.getCurrentLocale().getLanguage(); String saneDefaultForAppTermsOfUse = BundleUtil.getStringFromBundle("system.app.terms"); @@ -407,9 +432,9 @@ public String getApplicationTermsOfUse() { // value, or as a better default than the saneDefaultForAppTermsOfUse if there // is no language-specific value String appTermsOfUse = settingsService.getValueForKey(SettingsServiceBean.Key.ApplicationTermsOfUse, saneDefaultForAppTermsOfUse); - // Now get the language-specific value if it exists + //Now get the language-specific value if it exists if (language != null && !language.equalsIgnoreCase(BundleUtil.getDefaultLocale().getLanguage())) { - appTermsOfUse = settingsService.getValueForKey(SettingsServiceBean.Key.ApplicationTermsOfUse, language, appTermsOfUse); + appTermsOfUse = settingsService.getValueForKey(SettingsServiceBean.Key.ApplicationTermsOfUse, language, appTermsOfUse); } return appTermsOfUse; } @@ -420,7 +445,7 @@ public String getApiTermsOfUse() { return apiTermsOfUse; } - // TODO: + // TODO: // remove this method! // pages should be using settingsWrapper.get(":ApplicationPrivacyPolicyUrl") instead. 
-- 4.2.1 public String getApplicationPrivacyPolicyUrl() { @@ -439,10 +464,10 @@ public boolean isFilesOnDatasetPageFromSolr() { return settingsService.isTrueForKey(SettingsServiceBean.Key.FilesOnDatasetPageFromSolr, safeDefaultIfKeyNotFound); } - public Long getMaxFileUploadSizeForStore(String driverId) { - return settingsService.getValueForCompoundKeyAsLong(SettingsServiceBean.Key.MaxFileUploadSizeInBytes, driverId); - } - + public Long getMaxFileUploadSizeForStore(String driverId){ + return settingsService.getValueForCompoundKeyAsLong(SettingsServiceBean.Key.MaxFileUploadSizeInBytes, driverId); + } + public Integer getSearchHighlightFragmentSize() { String fragSize = settingsService.getValueForKey(SettingsServiceBean.Key.SearchHighlightFragmentSize); if (fragSize != null) { @@ -456,12 +481,12 @@ public Integer getSearchHighlightFragmentSize() { } public long getTabularIngestSizeLimit() { - // This method will return the blanket ingestable size limit, if - // set on the system. I.e., the universal limit that applies to all - // tabular ingests, regardless of fromat: - - String limitEntry = settingsService.getValueForKey(SettingsServiceBean.Key.TabularIngestSizeLimit); - + // This method will return the blanket ingestable size limit, if + // set on the system. I.e., the universal limit that applies to all + // tabular ingests, regardless of fromat: + + String limitEntry = settingsService.getValueForKey(SettingsServiceBean.Key.TabularIngestSizeLimit); + if (limitEntry != null) { try { Long sizeOption = new Long(limitEntry); @@ -470,48 +495,48 @@ public long getTabularIngestSizeLimit() { logger.warning("Invalid value for TabularIngestSizeLimit option? - " + limitEntry); } } - // -1 means no limit is set; - // 0 on the other hand would mean that ingest is fully disabled for - // tabular data. - return -1; + // -1 means no limit is set; + // 0 on the other hand would mean that ingest is fully disabled for + // tabular data. 
+ return -1; } - + public long getTabularIngestSizeLimit(String formatName) { // This method returns the size limit set specifically for this format name, - // if available, otherwise - the blanket limit that applies to all tabular - // ingests regardless of a format. - + // if available, otherwise - the blanket limit that applies to all tabular + // ingests regardless of a format. + if (formatName == null || formatName.equals("")) { - return getTabularIngestSizeLimit(); + return getTabularIngestSizeLimit(); } - + String limitEntry = settingsService.get(SettingsServiceBean.Key.TabularIngestSizeLimit.toString() + ":" + formatName); - + if (limitEntry != null) { try { Long sizeOption = new Long(limitEntry); return sizeOption; } catch (NumberFormatException nfe) { - logger.warning("Invalid value for TabularIngestSizeLimit:" + formatName + "? - " + limitEntry); + logger.warning("Invalid value for TabularIngestSizeLimit:" + formatName + "? - " + limitEntry ); } } - - return getTabularIngestSizeLimit(); + + return getTabularIngestSizeLimit(); } public boolean isOAIServerEnabled() { boolean defaultResponse = false; return settingsService.isTrueForKey(SettingsServiceBean.Key.OAIServerEnabled, defaultResponse); } - + public void enableOAIServer() { settingsService.setValueForKey(SettingsServiceBean.Key.OAIServerEnabled, "true"); } - + public void disableOAIServer() { settingsService.deleteValueForKey(SettingsServiceBean.Key.OAIServerEnabled); - } - + } + public boolean isTimerServer() { String optionValue = System.getProperty(JVM_TIMER_SERVER_OPTION); if ("true".equalsIgnoreCase(optionValue)) { @@ -579,12 +604,11 @@ public String getOAuth2CallbackUrl() { } return saneDefault; } - + public boolean isShibPassiveLoginEnabled() { boolean defaultResponse = false; return settingsService.isTrueForKey(SettingsServiceBean.Key.ShibPassiveLoginEnabled, defaultResponse); } - public boolean isShibAttributeCharacterSetConversionEnabled() { boolean defaultResponse = true; return 
settingsService.isTrueForKey(SettingsServiceBean.Key.ShibAttributeCharacterSetConversionEnabled, defaultResponse); @@ -610,7 +634,7 @@ public String getPVDictionaries() { public int getPVGoodStrength() { // FIXME: Change this to 21 to match Harvard's requirements or implement a way to disable the rule (0 or -1) and have the default be disabled. int goodStrengthLength = 20; - // String _goodStrengthLength = System.getProperty("pv.goodstrength", settingsService.get(SettingsServiceBean.Key.PVGoodStrength.toString())); + //String _goodStrengthLength = System.getProperty("pv.goodstrength", settingsService.get(SettingsServiceBean.Key.PVGoodStrength.toString())); String _goodStrengthLength = settingsService.get(SettingsServiceBean.Key.PVGoodStrength.toString()); try { goodStrengthLength = Integer.parseInt(_goodStrengthLength); @@ -769,7 +793,8 @@ public static FileUploadMethods fromString(String text) { public String toString() { return text; } - + + } /** @@ -785,8 +810,8 @@ public enum FileDownloadMethods { */ RSYNC("rsal/rsync"), NATIVE("native/http"), - GLOBUS("globus"); - + GLOBUS("globus") + ; private final String text; private FileDownloadMethods(final String text) { @@ -808,28 +833,27 @@ public static FileUploadMethods fromString(String text) { public String toString() { return text; } - + } - + public enum DataFilePIDFormat { DEPENDENT("DEPENDENT"), INDEPENDENT("INDEPENDENT"); - private final String text; public String getText() { return text; } - - private DataFilePIDFormat(final String text) { + + private DataFilePIDFormat(final String text){ this.text = text; } - + @Override public String toString() { return text; } - + } /** @@ -869,16 +893,16 @@ public String toString() { } - public boolean isPublicInstall() { + public boolean isPublicInstall(){ boolean saneDefault = false; return settingsService.isTrueForKey(SettingsServiceBean.Key.PublicInstall, saneDefault); } - - public boolean isRsyncUpload() { + + public boolean isRsyncUpload(){ return 
getMethodAvailable(SystemConfig.FileUploadMethods.RSYNC.toString(), true); } - public boolean isGlobusUpload() { + public boolean isGlobusUpload(){ return getMethodAvailable(FileUploadMethods.GLOBUS.toString(), true); } @@ -887,30 +911,30 @@ public boolean isWebloaderUpload(){ } // Controls if HTTP upload is enabled for both GUI and API. - public boolean isHTTPUpload() { + public boolean isHTTPUpload(){ return getMethodAvailable(SystemConfig.FileUploadMethods.NATIVE.toString(), true); } - - public boolean isRsyncOnly() { + + public boolean isRsyncOnly(){ String downloadMethods = settingsService.getValueForKey(SettingsServiceBean.Key.DownloadMethods); - if (downloadMethods == null) { + if(downloadMethods == null){ return false; } - if (!downloadMethods.toLowerCase().equals(SystemConfig.FileDownloadMethods.RSYNC.toString())) { + if (!downloadMethods.toLowerCase().equals(SystemConfig.FileDownloadMethods.RSYNC.toString())){ return false; } String uploadMethods = settingsService.getValueForKey(SettingsServiceBean.Key.UploadMethods); - if (uploadMethods == null) { + if (uploadMethods==null){ return false; } else { - return Arrays.asList(uploadMethods.toLowerCase().split("\\s*,\\s*")).size() == 1 && uploadMethods.toLowerCase().equals(SystemConfig.FileUploadMethods.RSYNC.toString()); + return Arrays.asList(uploadMethods.toLowerCase().split("\\s*,\\s*")).size() == 1 && uploadMethods.toLowerCase().equals(SystemConfig.FileUploadMethods.RSYNC.toString()); } } - + public boolean isRsyncDownload() { return getMethodAvailable(SystemConfig.FileUploadMethods.RSYNC.toString(), false); } - + public boolean isHTTPDownload() { return getMethodAvailable(SystemConfig.FileUploadMethods.NATIVE.toString(), false); } @@ -918,14 +942,14 @@ public boolean isHTTPDownload() { public boolean isGlobusDownload() { return getMethodAvailable(FileUploadMethods.GLOBUS.toString(), false); } - + public boolean isGlobusFileDownload() { return (isGlobusDownload() && 
settingsService.isTrueForKey(SettingsServiceBean.Key.GlobusSingleFileTransfer, false)); } public List getGlobusStoresList() { - String globusStores = settingsService.getValueForKey(SettingsServiceBean.Key.GlobusStores, ""); - return Arrays.asList(globusStores.split("\\s*,\\s*")); + String globusStores = settingsService.getValueForKey(SettingsServiceBean.Key.GlobusStores, ""); + return Arrays.asList(globusStores.split("\\s*,\\s*")); } private Boolean getMethodAvailable(String method, boolean upload) { @@ -937,32 +961,31 @@ private Boolean getMethodAvailable(String method, boolean upload) { return Arrays.asList(methods.toLowerCase().split("\\s*,\\s*")).contains(method); } } - - public Integer getUploadMethodCount() { - String uploadMethods = settingsService.getValueForKey(SettingsServiceBean.Key.UploadMethods); - if (uploadMethods == null) { + + public Integer getUploadMethodCount(){ + String uploadMethods = settingsService.getValueForKey(SettingsServiceBean.Key.UploadMethods); + if (uploadMethods==null){ return 0; } else { - return Arrays.asList(uploadMethods.toLowerCase().split("\\s*,\\s*")).size(); - } + return Arrays.asList(uploadMethods.toLowerCase().split("\\s*,\\s*")).size(); + } } - - public boolean isDataFilePIDSequentialDependent() { + public boolean isDataFilePIDSequentialDependent(){ String doiIdentifierType = settingsService.getValueForKey(SettingsServiceBean.Key.IdentifierGenerationStyle, "randomString"); String doiDataFileFormat = settingsService.getValueForKey(SettingsServiceBean.Key.DataFilePIDFormat, "DEPENDENT"); - if (doiIdentifierType.equals("storedProcGenerated") && doiDataFileFormat.equals("DEPENDENT")) { + if (doiIdentifierType.equals("storedProcGenerated") && doiDataFileFormat.equals("DEPENDENT")){ return true; } return false; } - + public int getPIDAsynchRegFileCount() { String fileCount = settingsService.getValueForKey(SettingsServiceBean.Key.PIDAsynchRegFileCount, "10"); int retVal = 10; try { retVal = Integer.parseInt(fileCount); - } 
catch (NumberFormatException e) { - // if no number in the setting we'll return 10 + } catch (NumberFormatException e) { + //if no number in the setting we'll return 10 } return retVal; } @@ -976,13 +999,13 @@ public boolean isFilePIDsEnabled() { boolean safeDefaultIfKeyNotFound = true; return settingsService.isTrueForKey(SettingsServiceBean.Key.FilePIDsEnabled, safeDefaultIfKeyNotFound); } - + public boolean isIndependentHandleService() { boolean safeDefaultIfKeyNotFound = false; return settingsService.isTrueForKey(SettingsServiceBean.Key.IndependentHandleService, safeDefaultIfKeyNotFound); - + } - + public String getHandleAuthHandle() { String handleAuthHandle = settingsService.getValueForKey(SettingsServiceBean.Key.HandleAuthHandle, null); return handleAuthHandle; @@ -992,61 +1015,60 @@ public String getMDCLogPath() { String mDCLogPath = settingsService.getValueForKey(SettingsServiceBean.Key.MDCLogPath, null); return mDCLogPath; } - + public boolean isDatafileValidationOnPublishEnabled() { boolean safeDefaultIfKeyNotFound = true; return settingsService.isTrueForKey(SettingsServiceBean.Key.FileValidationOnPublishEnabled, safeDefaultIfKeyNotFound); } - public boolean directUploadEnabled(DvObjectContainer container) { - // this method is used in UI only, therfore "dataverse.files." + driverId + ".upload-out-of-band" is not used here - return Boolean.parseBoolean(System.getProperty("dataverse.files." + container.getEffectiveStorageDriverId() + ".upload-redirect")); - } - - public String getDataCiteRestApiUrlString() { - // As of 5.0 the 'doi.dataciterestapiurlstring' is the documented jvm option. Prior versions used 'doi.mdcbaseurlstring' or were hardcoded to api.datacite.org, so the defaults are for backward compatibility. + public boolean directUploadEnabled(DvObjectContainer container) { + return Boolean.getBoolean("dataverse.files." 
+ container.getEffectiveStorageDriverId() + ".upload-redirect"); + } + + public String getDataCiteRestApiUrlString() { + //As of 5.0 the 'doi.dataciterestapiurlstring' is the documented jvm option. Prior versions used 'doi.mdcbaseurlstring' or were hardcoded to api.datacite.org, so the defaults are for backward compatibility. return System.getProperty("doi.dataciterestapiurlstring", System.getProperty("doi.mdcbaseurlstring", "https://api.datacite.org")); - } - + } + public boolean isExternalDataverseValidationEnabled() { return settingsService.getValueForKey(SettingsServiceBean.Key.DataverseMetadataValidatorScript) != null; - // alternatively, we can also check if the script specified exists, + // alternatively, we can also check if the script specified exists, // and is executable. -- ? } - + public boolean isExternalDatasetValidationEnabled() { return settingsService.getValueForKey(SettingsServiceBean.Key.DatasetMetadataValidatorScript) != null; - // alternatively, we can also check if the script specified exists, + // alternatively, we can also check if the script specified exists, // and is executable. -- ? 
} - + public String getDataverseValidationExecutable() { return settingsService.getValueForKey(SettingsServiceBean.Key.DataverseMetadataValidatorScript); } - + public String getDatasetValidationExecutable() { return settingsService.getValueForKey(SettingsServiceBean.Key.DatasetMetadataValidatorScript); } - + public String getDataverseValidationFailureMsg() { String defaultMessage = "This dataverse collection cannot be published because it has failed an external metadata validation test."; return settingsService.getValueForKey(SettingsServiceBean.Key.DataverseMetadataPublishValidationFailureMsg, defaultMessage); } - + public String getDataverseUpdateValidationFailureMsg() { String defaultMessage = "This dataverse collection cannot be updated because it has failed an external metadata validation test."; return settingsService.getValueForKey(SettingsServiceBean.Key.DataverseMetadataUpdateValidationFailureMsg, defaultMessage); } - + public String getDatasetValidationFailureMsg() { String defaultMessage = "This dataset cannot be published because it has failed an external metadata validation test."; return settingsService.getValueForKey(SettingsServiceBean.Key.DatasetMetadataValidationFailureMsg, defaultMessage); } - + public boolean isExternalValidationAdminOverrideEnabled() { return "true".equalsIgnoreCase(settingsService.getValueForKey(SettingsServiceBean.Key.ExternalValidationAdminOverride)); } - + public long getDatasetValidationSizeLimit() { String limitEntry = settingsService.getValueForKey(SettingsServiceBean.Key.DatasetChecksumValidationSizeLimit); @@ -1076,7 +1098,6 @@ public long getFileValidationSizeLimit() { // -1 means no limit is set; return -1; } - public Map getCurationLabels() { Map labelMap = new HashMap(); String setting = settingsService.getValueForKey(SettingsServiceBean.Key.AllowedCurationLabels, ""); @@ -1117,15 +1138,15 @@ public Map getCurationLabels() { } return labelMap; } - + public boolean isSignupDisabledForRemoteAuthProvider(String 
providerId) { - Boolean ret = settingsService.getValueForCompoundKeyAsBoolean(SettingsServiceBean.Key.AllowRemoteAuthSignUp, providerId); - - // we default to false - i.e., "not disabled" if the setting is not present: + Boolean ret = settingsService.getValueForCompoundKeyAsBoolean(SettingsServiceBean.Key.AllowRemoteAuthSignUp, providerId); + + // we default to false - i.e., "not disabled" if the setting is not present: if (ret == null) { - return false; + return false; } - - return !ret; + + return !ret; } } From 364858af43fcce6a1cfb4decefa8215621d4f648 Mon Sep 17 00:00:00 2001 From: Leonid Andreev Date: Tue, 25 Apr 2023 17:27:51 -0400 Subject: [PATCH 033/396] Further reconciliation of the quotas branch with the zip optimization pr. (#9361) --- .../impl/CreateNewDataFilesCommand.java | 111 +++++++++++++++--- .../harvard/iq/dataverse/util/FileUtil.java | 10 +- 2 files changed, 99 insertions(+), 22 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateNewDataFilesCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateNewDataFilesCommand.java index 9f281f9446d..15304502232 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateNewDataFilesCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateNewDataFilesCommand.java @@ -41,10 +41,12 @@ import java.text.MessageFormat; import java.util.ArrayList; import java.util.Arrays; +import java.util.Enumeration; import java.util.List; import java.util.Optional; import java.util.logging.Logger; import java.util.zip.GZIPInputStream; +import java.util.zip.ZipFile; import java.util.zip.ZipEntry; import java.util.zip.ZipInputStream; import javax.enterprise.inject.spi.CDI; @@ -224,6 +226,7 @@ public CreateDataFileResult execute(CommandContext ctxt) throws CommandException // DataFile objects from its contents: } else if (finalType.equals("application/zip")) { + ZipFile zipFile = null; ZipInputStream unZippedIn = null; 
ZipEntry zipEntry = null; @@ -253,13 +256,88 @@ public CreateDataFileResult execute(CommandContext ctxt) throws CommandException } */ + /** + * Perform a quick check for how many individual files are + * inside this zip archive. If it's above the limit, we can + * give up right away, without doing any unpacking. + * This should be a fairly inexpensive operation, we just need + * to read the directory at the end of the file. + */ + + if (charset != null) { + zipFile = new ZipFile(tempFile.toFile(), charset); + } else { + zipFile = new ZipFile(tempFile.toFile()); + } + /** + * The ZipFile constructors above will throw ZipException - + * a type of IOException - if there's something wrong + * with this file as a zip. There's no need to intercept it + * here, it will be caught further below, with other IOExceptions, + * at which point we'll give up on trying to unpack it and + * then attempt to save it as is. + */ + + int numberOfUnpackableFiles = 0; + Long combinedUnzippedFileSize = 0L; + + /** + * Note that we can't just use zipFile.size(), + * unfortunately, since that's the total number of entries, + * some of which can be directories. So we need to go + * through all the individual zipEntries and count the ones + * that are files. + */ + + for (Enumeration entries = zipFile.entries(); entries.hasMoreElements();) { + ZipEntry entry = entries.nextElement(); + logger.fine("inside first zip pass; this entry: "+entry.getName()); + if (!entry.isDirectory()) { + String shortName = entry.getName().replaceFirst("^.*[\\/]", ""); + // ... 
and, finally, check if it's a "fake" file - a zip archive entry + // created for a MacOS X filesystem element: (these + // start with "._") + if (!shortName.startsWith("._") && !shortName.startsWith(".DS_Store") && !"".equals(shortName)) { + numberOfUnpackableFiles++; + if (numberOfUnpackableFiles > fileNumberLimit) { + logger.warning("Zip upload - too many files in the zip to process individually."); + warningMessage = "The number of files in the zip archive is over the limit (" + fileNumberLimit + + "); please upload a zip archive with fewer files, if you want them to be ingested " + + "as individual DataFiles."; + throw new IOException(); + } + // In addition to counting the files, we can + // also check the file size while we're here, + // provided the size limit is defined; if a single + // file is above the individual size limit, unzipped, + // we give up on unpacking this zip archive as well: + if (fileSizeLimit != null && entry.getSize() > fileSizeLimit) { + throw new FileExceedsMaxSizeException(MessageFormat.format(BundleUtil.getStringFromBundle("file.addreplace.error.file_exceeds_limit"), bytesToHumanReadable(entry.getSize()), bytesToHumanReadable(fileSizeLimit))); + } + // Similarly, we want to check if saving all these unpacked + // files is going to push the disk usage over the + // quota: + if (storageQuotaLimit != null) { + combinedUnzippedFileSize = combinedUnzippedFileSize + entry.getSize(); + if (combinedUnzippedFileSize > storageQuotaLimit) { + throw new FileExceedsStorageQuotaException(MessageFormat.format(BundleUtil.getStringFromBundle("file.addreplace.error.quota_exceeded"), bytesToHumanReadable(combinedUnzippedFileSize), bytesToHumanReadable(storageQuotaLimit))); + } + } + } + } + } + + // OK we're still here - that means we can proceed unzipping. 
+ + // Close the ZipFile, re-open as ZipInputStream: + zipFile.close(); + if (charset != null) { unZippedIn = new ZipInputStream(new FileInputStream(tempFile.toFile()), charset); } else { unZippedIn = new ZipInputStream(new FileInputStream(tempFile.toFile())); } - Long storageQuotaLimitForUnzippedFiles = storageQuotaLimit; while (true) { try { zipEntry = unZippedIn.getNextEntry(); @@ -304,16 +382,16 @@ public CreateDataFileResult execute(CommandContext ctxt) throws CommandException // OK, this seems like an OK file entry - we'll try // to read it and create a DataFile with it: - File unZippedTempFile = saveInputStreamInTempFile(unZippedIn, fileSizeLimit, storageQuotaLimitForUnzippedFiles); - DataFile datafile = FileUtil.createSingleDataFile(version, - unZippedTempFile, - null, - shortName, + String storageIdentifier = FileUtil.generateStorageIdentifier(); + File unzippedFile = new File(getFilesTempDirectory() + "/" + storageIdentifier); + Files.copy(unZippedIn, unzippedFile.toPath(), StandardCopyOption.REPLACE_EXISTING); + // No need to check the size of this unpacked file against the size limit, + // since we've already checked for that in the first pass. 
+ + DataFile datafile = FileUtil.createSingleDataFile(version, null, storageIdentifier, shortName, MIME_TYPE_UNDETERMINED_DEFAULT, ctxt.systemConfig().getFileFixityChecksumAlgorithm(), null, false); - storageQuotaLimitForUnzippedFiles = storageQuotaLimitForUnzippedFiles - datafile.getFilesize(); - if (!fileEntryName.equals(shortName)) { // If the filename looks like a hierarchical folder name (i.e., contains slashes and backslashes), // we'll extract the directory name; then subject it to some "aggressive sanitizing" - strip all @@ -336,7 +414,9 @@ public CreateDataFileResult execute(CommandContext ctxt) throws CommandException String tempFileName = getFilesTempDirectory() + "/" + datafile.getStorageIdentifier(); try { - recognizedType = determineFileType(new File(tempFileName), shortName); + recognizedType = determineFileType(unzippedFile, shortName); + // null the File explicitly, to release any open FDs: + unzippedFile = null; logger.fine("File utility recognized unzipped file as " + recognizedType); if (recognizedType != null && !recognizedType.equals("")) { datafile.setContentType(recognizedType); @@ -373,14 +453,18 @@ public CreateDataFileResult execute(CommandContext ctxt) throws CommandException warningMessage = BundleUtil.getStringFromBundle("file.addreplace.warning.unzip.failed.quota", Arrays.asList(FileSizeChecker.bytesToHumanReadable(storageQuotaLimit))); datafiles.clear(); } finally { + if (zipFile != null) { + try { + zipFile.close(); + } catch (Exception zEx) {} + } if (unZippedIn != null) { try { unZippedIn.close(); - } catch (Exception zEx) { - } + } catch (Exception zEx) {} } } - if (datafiles.size() > 0) { + if (!datafiles.isEmpty()) { // remove the uploaded zip file: try { Files.delete(tempFile); @@ -447,7 +531,6 @@ public CreateDataFileResult execute(CommandContext ctxt) throws CommandException } catch (IOException ioex) { // this one can be ignored } - } } catch (FileExceedsMaxSizeException | FileExceedsStorageQuotaException femsx) { 
logger.severe("One of the unzipped shape files exceeded the size limit, or the storage quota; giving up. " + femsx.getMessage()); @@ -468,7 +551,7 @@ public CreateDataFileResult execute(CommandContext ctxt) throws CommandException logger.warning("Could not remove temp folder, error message : " + ioex.getMessage()); } - if (datafiles.size() > 0) { + if (!datafiles.isEmpty()) { // remove the uploaded zip file: try { Files.delete(tempFile); diff --git a/src/main/java/edu/harvard/iq/dataverse/util/FileUtil.java b/src/main/java/edu/harvard/iq/dataverse/util/FileUtil.java index 6b1dfe9c163..73d06ad8bee 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/FileUtil.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/FileUtil.java @@ -96,20 +96,14 @@ import javax.xml.stream.XMLStreamException; import javax.xml.stream.XMLStreamReader; -import org.apache.commons.io.FileUtils; import java.util.zip.GZIPInputStream; -import java.util.zip.ZipFile; -import java.util.zip.ZipEntry; -import java.util.zip.ZipInputStream; import org.apache.commons.io.FilenameUtils; import edu.harvard.iq.dataverse.dataaccess.DataAccessOption; import edu.harvard.iq.dataverse.dataaccess.StorageIO; import edu.harvard.iq.dataverse.util.file.FileExceedsStorageQuotaException; -import edu.harvard.iq.dataverse.datasetutility.FileSizeChecker; import java.util.Arrays; -import java.util.Enumeration; import org.apache.commons.io.IOUtils; import org.apache.commons.lang3.StringUtils; import ucar.nc2.NetcdfFile; @@ -848,12 +842,12 @@ public static File saveInputStreamInTempFile(InputStream inputStream, Long fileS Long fileSize = tempFile.toFile().length(); if (fileSizeLimit != null && fileSize > fileSizeLimit) { try {tempFile.toFile().delete();} catch (Exception ex) {} - throw new FileExceedsMaxSizeException (MessageFormat.format(BundleUtil.getStringFromBundle("file.addreplace.error.file_exceeds_limit"), bytesToHumanReadable(fileSize), bytesToHumanReadable(fileSizeLimit))); + throw new 
FileExceedsMaxSizeException(MessageFormat.format(BundleUtil.getStringFromBundle("file.addreplace.error.file_exceeds_limit"), bytesToHumanReadable(fileSize), bytesToHumanReadable(fileSizeLimit))); } if (storageQuotaLimit != null && fileSize > storageQuotaLimit) { try {tempFile.toFile().delete();} catch (Exception ex) {} - throw new FileExceedsStorageQuotaException (MessageFormat.format(BundleUtil.getStringFromBundle("file.addreplace.error.quota_exceeded"), bytesToHumanReadable(fileSize), bytesToHumanReadable(storageQuotaLimit))); + throw new FileExceedsStorageQuotaException(MessageFormat.format(BundleUtil.getStringFromBundle("file.addreplace.error.quota_exceeded"), bytesToHumanReadable(fileSize), bytesToHumanReadable(storageQuotaLimit))); } return tempFile.toFile(); } From f6ea0fd06262a878a4e41048d046bce00d1e9784 Mon Sep 17 00:00:00 2001 From: Leonid Andreev Date: Tue, 25 Apr 2023 17:55:58 -0400 Subject: [PATCH 034/396] remaining quota in bytes should never be shown as negative, if exceeded.
#9361 --- .../java/edu/harvard/iq/dataverse/DataFileServiceBean.java | 3 +++ 1 file changed, 3 insertions(+) diff --git a/src/main/java/edu/harvard/iq/dataverse/DataFileServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DataFileServiceBean.java index 5732b9df7ed..2e30e6b76f8 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DataFileServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/DataFileServiceBean.java @@ -168,6 +168,9 @@ public void setTotalUsageInBytes(Long totalUsageInBytes) { } public Long getRemainingQuotaInBytes() { + if (totalUsageInBytes > totalAllocatedInBytes) { + return 0L; + } return totalAllocatedInBytes - totalUsageInBytes; } } From 30ea450e6c2f32ad3c545249375b510733e8ab13 Mon Sep 17 00:00:00 2001 From: Leonid Andreev Date: Wed, 26 Apr 2023 13:14:26 -0400 Subject: [PATCH 035/396] this should resolve the Sword upload errors (wasn't calling the new command with the right auth) #9361 --- .../iq/dataverse/api/datadeposit/MediaResourceManagerImpl.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/MediaResourceManagerImpl.java b/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/MediaResourceManagerImpl.java index bd7067efb7b..c71eeb3d375 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/MediaResourceManagerImpl.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/MediaResourceManagerImpl.java @@ -309,7 +309,7 @@ DepositReceipt replaceOrAddFiles(String uri, Deposit deposit, AuthCredentials au try { //CreateDataFileResult createDataFilesResponse = FileUtil.createDataFiles(editVersion, deposit.getInputStream(), uploadedZipFilename, guessContentTypeForMe, null, null, systemConfig); - Command cmd = new CreateNewDataFilesCommand(dvRequestService.getDataverseRequest(), editVersion, deposit.getInputStream(), uploadedZipFilename, guessContentTypeForMe, null, null); + Command cmd = new CreateNewDataFilesCommand(dvReq, editVersion, 
deposit.getInputStream(), uploadedZipFilename, guessContentTypeForMe, null, null); CreateDataFileResult createDataFilesResult = commandEngine.submit(cmd); dataFiles = createDataFilesResult.getDataFiles(); } catch (CommandException ex) { From 4955f810437788fef1a345e356308f8ea32c66cb Mon Sep 17 00:00:00 2001 From: Leonid Andreev Date: Wed, 26 Apr 2023 14:38:48 -0400 Subject: [PATCH 036/396] And this should fix the remaining shape file test. #9361 --- .../engine/command/impl/CreateNewDataFilesCommand.java | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateNewDataFilesCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateNewDataFilesCommand.java index 15304502232..365a260cfd2 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateNewDataFilesCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateNewDataFilesCommand.java @@ -522,7 +522,9 @@ public CreateDataFileResult execute(CommandContext ctxt) throws CommandException if (new_datafile != null) { datafiles.add(new_datafile); // todo: can this new_datafile be null? - storageQuotaLimitForRezippedFiles = storageQuotaLimitForRezippedFiles - new_datafile.getFilesize(); + if (storageQuotaLimitForRezippedFiles != null) { + storageQuotaLimitForRezippedFiles = storageQuotaLimitForRezippedFiles - new_datafile.getFilesize(); + } } else { logger.severe("Could not add part of rezipped shapefile. new_datafile was null: " + finalFile.getName()); } From cef601ee6af94fcfa56d9d02efb807d2c6bb20d2 Mon Sep 17 00:00:00 2001 From: Leonid Andreev Date: Tue, 9 May 2023 13:33:17 -0400 Subject: [PATCH 037/396] A new, experimental version of the Add Files command with the dynamic permissions lookup that allows it to be checked on either the Dataset or the parent Collection (when files are being added in the context of creating a new dataset via the Add Dataset page).
#9361 --- .../impl/CreateNewDataFilesCommand.java | 49 ++++++++++++++++++- 1 file changed, 47 insertions(+), 2 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateNewDataFilesCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateNewDataFilesCommand.java index 365a260cfd2..fb4f7a7db86 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateNewDataFilesCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateNewDataFilesCommand.java @@ -9,11 +9,12 @@ import edu.harvard.iq.dataverse.engine.command.AbstractCommand; import edu.harvard.iq.dataverse.engine.command.CommandContext; import edu.harvard.iq.dataverse.engine.command.DataverseRequest; -import edu.harvard.iq.dataverse.engine.command.RequiredPermissions; +//import edu.harvard.iq.dataverse.engine.command.RequiredPermissions; import edu.harvard.iq.dataverse.engine.command.exception.CommandException; import edu.harvard.iq.dataverse.engine.command.exception.CommandExecutionException; import edu.harvard.iq.dataverse.ingest.IngestServiceShapefileHelper; import edu.harvard.iq.dataverse.DataFileServiceBean.UserStorageQuota; +import edu.harvard.iq.dataverse.Dataverse; import edu.harvard.iq.dataverse.util.file.FileExceedsStorageQuotaException; import edu.harvard.iq.dataverse.util.BundleUtil; import edu.harvard.iq.dataverse.util.FileUtil; @@ -42,8 +43,12 @@ import java.util.ArrayList; import java.util.Arrays; import java.util.Enumeration; +import java.util.HashMap; +import java.util.HashSet; import java.util.List; +import java.util.Map; import java.util.Optional; +import java.util.Set; import java.util.logging.Logger; import java.util.zip.GZIPInputStream; import java.util.zip.ZipFile; @@ -57,7 +62,10 @@ * * @author landreev */ -@RequiredPermissions( Permission.EditDataset ) +// Note the commented out @RequiredPermissions. 
We need to use dynamic +// permissions instead, to accommodate both adding files to an existing +// dataset and files being uploaded on create of a new dataset. +//@RequiredPermissions( Permission.EditDataset ) public class CreateNewDataFilesCommand extends AbstractCommand { private static final Logger logger = Logger.getLogger(CreateNewDataFilesCommand.class.getCanonicalName()); @@ -68,6 +76,7 @@ public class CreateNewDataFilesCommand extends AbstractCommand> getRequiredPermissions() { + Map> ret = new HashMap<>(); + + ret.put("", new HashSet<>()); + + if (dataverse != null) { + // The command is called in the context of uploading files on + // create of a new dataset + ret.get("").add(Permission.AddDataset); + } else { + // An existing dataset + ret.get("").add(Permission.EditDataset); + } + + return ret; + } } From 51cfa409ddaa7d9d111bff71d6e69beff0a3454c Mon Sep 17 00:00:00 2001 From: Leonid Andreev Date: Tue, 9 May 2023 15:53:31 -0400 Subject: [PATCH 038/396] Changes to the edit files page to accommodate "upload on create" #9361 --- .../edu/harvard/iq/dataverse/EditDatafilesPage.java | 11 ++++++++++- .../command/impl/CreateNewDataFilesCommand.java | 3 ++- 2 files changed, 12 insertions(+), 2 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/EditDatafilesPage.java b/src/main/java/edu/harvard/iq/dataverse/EditDatafilesPage.java index c39e6f62ce2..b892acdb527 100644 --- a/src/main/java/edu/harvard/iq/dataverse/EditDatafilesPage.java +++ b/src/main/java/edu/harvard/iq/dataverse/EditDatafilesPage.java @@ -2058,7 +2058,16 @@ public void handleFileUpload(FileUploadEvent event) throws IOException { // zip file. 
///CreateDataFileResult createDataFilesResult = FileUtil.createDataFiles(workingVersion, uFile.getInputStream(), uFile.getFileName(), uFile.getContentType(), null, null, systemConfig); - Command cmd = new CreateNewDataFilesCommand(dvRequestService.getDataverseRequest(), workingVersion, uFile.getInputStream(), uFile.getFileName(), uFile.getContentType(), null, null); + Command cmd; + if (mode == FileEditMode.CREATE) { + // This is a file upload in the context of creating a brand new + // dataset that does not yet exist in the database. We must + // use the version of the Create New Files constructor that takes + // the parent Dataverse as the extra argument: + cmd = new CreateNewDataFilesCommand(dvRequestService.getDataverseRequest(), workingVersion, uFile.getInputStream(), uFile.getFileName(), uFile.getContentType(), null, null, null, workingVersion.getDataset().getOwner()); + } else { + cmd = new CreateNewDataFilesCommand(dvRequestService.getDataverseRequest(), workingVersion, uFile.getInputStream(), uFile.getFileName(), uFile.getContentType(), null, null); + } CreateDataFileResult createDataFilesResult = commandEngine.submit(cmd); diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateNewDataFilesCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateNewDataFilesCommand.java index fb4f7a7db86..e2d7f834c4a 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateNewDataFilesCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateNewDataFilesCommand.java @@ -64,7 +64,8 @@ */ // Note the commented out @RequiredPermissions. We need to use dynamic // permissions instead, to accommodate both adding files to an existing -// dataset and files being uploaded on create of a new dataset. +// dataset and files being uploaded in the context of creating a new dataset +// via the Add Dataset page. 
//@RequiredPermissions( Permission.EditDataset ) public class CreateNewDataFilesCommand extends AbstractCommand { private static final Logger logger = Logger.getLogger(CreateNewDataFilesCommand.class.getCanonicalName()); From 8560ba4e16f763d48e6ec20f1ddaecd9b5728603 Mon Sep 17 00:00:00 2001 From: Leonid Andreev Date: Tue, 9 May 2023 17:16:56 -0400 Subject: [PATCH 039/396] Makes the create file command refuse a zip file if uncompressed content exceeds the remaining quota; as opposed to accepting it zipped, even if the compressed size under the quota. #9361 --- .../command/impl/CreateNewDataFilesCommand.java | 17 +++++++++++------ src/main/java/propertyFiles/Bundle.properties | 1 + 2 files changed, 12 insertions(+), 6 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateNewDataFilesCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateNewDataFilesCommand.java index e2d7f834c4a..2608069dcb2 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateNewDataFilesCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateNewDataFilesCommand.java @@ -348,7 +348,11 @@ public CreateDataFileResult execute(CommandContext ctxt) throws CommandException if (storageQuotaLimit != null) { combinedUnzippedFileSize = combinedUnzippedFileSize + entry.getSize(); if (combinedUnzippedFileSize > storageQuotaLimit) { - throw new FileExceedsStorageQuotaException(MessageFormat.format(BundleUtil.getStringFromBundle("file.addreplace.error.quota_exceeded"), bytesToHumanReadable(combinedUnzippedFileSize), bytesToHumanReadable(storageQuotaLimit))); + //throw new FileExceedsStorageQuotaException(MessageFormat.format(BundleUtil.getStringFromBundle("file.addreplace.error.quota_exceeded"), bytesToHumanReadable(combinedUnzippedFileSize), bytesToHumanReadable(storageQuotaLimit))); + // change of plans: if the unzipped content inside exceeds the remaining quota, + // we reject the upload outright, 
rather than accepting the zip + // file as is. + throw new CommandExecutionException(MessageFormat.format(BundleUtil.getStringFromBundle("file.addreplace.error.unzipped.quota_exceeded"), bytesToHumanReadable(storageQuotaLimit)), this); } } } @@ -476,11 +480,12 @@ public CreateDataFileResult execute(CommandContext ctxt) throws CommandException logger.warning("One of the unzipped files exceeds the size limit; resorting to saving the file as is. " + femsx.getMessage()); warningMessage = BundleUtil.getStringFromBundle("file.addreplace.warning.unzip.failed.size", Arrays.asList(FileSizeChecker.bytesToHumanReadable(fileSizeLimit))); datafiles.clear(); - } catch (FileExceedsStorageQuotaException fesqx) { - logger.warning("One of the unzipped files exceeds the storage quota limit; resorting to saving the file as is. " + fesqx.getMessage()); - warningMessage = BundleUtil.getStringFromBundle("file.addreplace.warning.unzip.failed.quota", Arrays.asList(FileSizeChecker.bytesToHumanReadable(storageQuotaLimit))); - datafiles.clear(); - } finally { + } /*catch (FileExceedsStorageQuotaException fesqx) { + //logger.warning("One of the unzipped files exceeds the storage quota limit; resorting to saving the file as is. " + fesqx.getMessage()); + //warningMessage = BundleUtil.getStringFromBundle("file.addreplace.warning.unzip.failed.quota", Arrays.asList(FileSizeChecker.bytesToHumanReadable(storageQuotaLimit))); + //datafiles.clear(); + throw new CommandExecutionException(fesqx.getMessage(), fesqx, this); + }*/ finally { if (zipFile != null) { try { zipFile.close(); diff --git a/src/main/java/propertyFiles/Bundle.properties b/src/main/java/propertyFiles/Bundle.properties index 55679a01a07..b12e0df7c08 100644 --- a/src/main/java/propertyFiles/Bundle.properties +++ b/src/main/java/propertyFiles/Bundle.properties @@ -2155,6 +2155,7 @@ file.addreplace.file_size_ok=File size is in range. 
file.addreplace.error.byte_abrev=B file.addreplace.error.file_exceeds_limit=This file size ({0}) exceeds the size limit of {1}. file.addreplace.error.quota_exceeded=This file (size {0}) exceeds the remaining storage quota of {1}. +file.addreplace.error.unzipped.quota_exceeded=Unzipped files exceed the remaining storage quota of {0}. file.addreplace.error.dataset_is_null=The dataset cannot be null. file.addreplace.error.dataset_id_is_null=The dataset ID cannot be null. file.addreplace.error.parsing=Error in parsing provided json From 2ac403d35220f6ec61242f7f7f1c12b00f36a93c Mon Sep 17 00:00:00 2001 From: Leonid Andreev Date: Thu, 11 May 2023 11:04:00 -0400 Subject: [PATCH 040/396] changes needed to accommodate the quota changing dynamically, before the uploaded files are saved (when uploading via the page) #9361 --- .../iq/dataverse/EditDatafilesPage.java | 25 +++++++----- .../datadeposit/MediaResourceManagerImpl.java | 10 +++-- .../datasetutility/AddReplaceFileHelper.java | 6 ++- .../impl/CreateNewDataFilesCommand.java | 39 +++++++++++++++---- 4 files changed, 57 insertions(+), 23 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/EditDatafilesPage.java b/src/main/java/edu/harvard/iq/dataverse/EditDatafilesPage.java index b892acdb527..57729f321b5 100644 --- a/src/main/java/edu/harvard/iq/dataverse/EditDatafilesPage.java +++ b/src/main/java/edu/harvard/iq/dataverse/EditDatafilesPage.java @@ -2,6 +2,7 @@ import edu.harvard.iq.dataverse.provenance.ProvPopupFragmentBean; import edu.harvard.iq.dataverse.DataFile.ChecksumType; +import edu.harvard.iq.dataverse.DataFileServiceBean.UserStorageQuota; import edu.harvard.iq.dataverse.api.AbstractApiBean; import edu.harvard.iq.dataverse.authorization.AuthenticationServiceBean; import edu.harvard.iq.dataverse.authorization.Permission; @@ -206,6 +207,7 @@ public enum Referrer { private final int NUMBER_OF_SCROLL_ROWS = 25; private DataFile singleFile = null; + private UserStorageQuota userStorageQuota = null; public 
DataFile getSingleFile() { return singleFile; @@ -358,7 +360,7 @@ public String getHumanMaxTotalUploadSizeInBytes() { } public boolean isStorageQuotaEnforced() { - return maxTotalUploadSizeInBytes != null; + return userStorageQuota != null; } public Long getMaxIngestSizeInBytes() { @@ -529,22 +531,24 @@ public String initCreateMode(String modeToken, DatasetVersion version, MutableBo this.maxFileUploadSizeInBytes = systemConfig.getMaxFileUploadSizeForStore(dataset.getEffectiveStorageDriverId()); if (systemConfig.isStorageQuotasEnforced()) { - this.maxTotalUploadSizeInBytes = datafileService.getUserStorageQuota((AuthenticatedUser) session.getUser(), dataset).getRemainingQuotaInBytes(); + this.userStorageQuota = datafileService.getUserStorageQuota((AuthenticatedUser) session.getUser(), dataset); + this.maxTotalUploadSizeInBytes = userStorageQuota.getRemainingQuotaInBytes(); } else { this.maxTotalUploadSizeInBytes = null; } this.maxIngestSizeInBytes = systemConfig.getTabularIngestSizeLimit(); this.humanPerFormatTabularLimits = populateHumanPerFormatTabularLimits(); this.multipleUploadFilesLimit = systemConfig.getMultipleUploadFilesLimit(); - + logger.fine("done"); saveEnabled = true; + return null; } public boolean isQuotaExceeded() { - return systemConfig.isStorageQuotasEnforced() && datafileService.getUserStorageQuota((AuthenticatedUser) session.getUser(), dataset).getRemainingQuotaInBytes() == 0; + return systemConfig.isStorageQuotasEnforced() && userStorageQuota != null && userStorageQuota.getRemainingQuotaInBytes() == 0; } public String init() { @@ -589,7 +593,8 @@ public String init() { clone = workingVersion.cloneDatasetVersion(); this.maxFileUploadSizeInBytes = systemConfig.getMaxFileUploadSizeForStore(dataset.getEffectiveStorageDriverId()); if (systemConfig.isStorageQuotasEnforced()) { - this.maxTotalUploadSizeInBytes = datafileService.getUserStorageQuota((AuthenticatedUser) session.getUser(), dataset).getRemainingQuotaInBytes(); + this.userStorageQuota = 
datafileService.getUserStorageQuota((AuthenticatedUser) session.getUser(), dataset); + this.maxTotalUploadSizeInBytes = userStorageQuota.getRemainingQuotaInBytes(); } this.maxIngestSizeInBytes = systemConfig.getTabularIngestSizeLimit(); this.humanPerFormatTabularLimits = populateHumanPerFormatTabularLimits(); @@ -687,7 +692,7 @@ public String init() { if (isHasPublicStore()){ JH.addMessage(FacesMessage.SEVERITY_WARN, getBundleString("dataset.message.label.fileAccess"), getBundleString("dataset.message.publicInstall")); } - + return null; } @@ -1525,7 +1530,7 @@ public void handleDropBoxUpload(ActionEvent event) { // zip file. //datafiles = ingestService.createDataFiles(workingVersion, dropBoxStream, fileName, "application/octet-stream"); //CreateDataFileResult createDataFilesResult = FileUtil.createDataFiles(workingVersion, dropBoxStream, fileName, "application/octet-stream", null, null, systemConfig); - Command cmd = new CreateNewDataFilesCommand(dvRequestService.getDataverseRequest(), workingVersion, dropBoxStream, fileName, "application/octet-stream", null, null); + Command cmd = new CreateNewDataFilesCommand(dvRequestService.getDataverseRequest(), workingVersion, dropBoxStream, fileName, "application/octet-stream", null, userStorageQuota, null); CreateDataFileResult createDataFilesResult = commandEngine.submit(cmd); datafiles = createDataFilesResult.getDataFiles(); Optional.ofNullable(editDataFilesPageHelper.getHtmlErrorMessage(createDataFilesResult)).ifPresent(errorMessage -> errorMessages.add(errorMessage)); @@ -2064,9 +2069,9 @@ public void handleFileUpload(FileUploadEvent event) throws IOException { // dataset that does not yet exist in the database. 
We must // use the version of the Create New Files constructor that takes // the parent Dataverse as the extra argument: - cmd = new CreateNewDataFilesCommand(dvRequestService.getDataverseRequest(), workingVersion, uFile.getInputStream(), uFile.getFileName(), uFile.getContentType(), null, null, null, workingVersion.getDataset().getOwner()); + cmd = new CreateNewDataFilesCommand(dvRequestService.getDataverseRequest(), workingVersion, uFile.getInputStream(), uFile.getFileName(), uFile.getContentType(), null, userStorageQuota, null, null, workingVersion.getDataset().getOwner()); } else { - cmd = new CreateNewDataFilesCommand(dvRequestService.getDataverseRequest(), workingVersion, uFile.getInputStream(), uFile.getFileName(), uFile.getContentType(), null, null); + cmd = new CreateNewDataFilesCommand(dvRequestService.getDataverseRequest(), workingVersion, uFile.getInputStream(), uFile.getFileName(), uFile.getContentType(), null, userStorageQuota, null); } CreateDataFileResult createDataFilesResult = commandEngine.submit(cmd); @@ -2193,7 +2198,7 @@ public void handleExternalUpload() { //datafiles = ingestService.createDataFiles(workingVersion, dropBoxStream, fileName, "application/octet-stream"); ///CreateDataFileResult createDataFilesResult = FileUtil.createDataFiles(workingVersion, null, fileName, contentType, fullStorageIdentifier, checksumValue, checksumType, systemConfig); - Command cmd = new CreateNewDataFilesCommand(dvRequestService.getDataverseRequest(), workingVersion, null, fileName, contentType, fullStorageIdentifier, checksumValue, checksumType); + Command cmd = new CreateNewDataFilesCommand(dvRequestService.getDataverseRequest(), workingVersion, null, fileName, contentType, fullStorageIdentifier, userStorageQuota, checksumValue, checksumType); CreateDataFileResult createDataFilesResult = commandEngine.submit(cmd); datafiles = createDataFilesResult.getDataFiles(); 
Optional.ofNullable(editDataFilesPageHelper.getHtmlErrorMessage(createDataFilesResult)).ifPresent(errorMessage -> errorMessages.add(errorMessage)); diff --git a/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/MediaResourceManagerImpl.java b/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/MediaResourceManagerImpl.java index c71eeb3d375..0f11b858238 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/MediaResourceManagerImpl.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/MediaResourceManagerImpl.java @@ -10,9 +10,8 @@ import edu.harvard.iq.dataverse.EjbDataverseEngine; import edu.harvard.iq.dataverse.PermissionServiceBean; import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser; -import edu.harvard.iq.dataverse.dataaccess.StorageIO; -import edu.harvard.iq.dataverse.datacapturemodule.DataCaptureModuleUtil; import edu.harvard.iq.dataverse.datasetutility.FileExceedsMaxSizeException; +import edu.harvard.iq.dataverse.DataFileServiceBean.UserStorageQuota; import edu.harvard.iq.dataverse.engine.command.Command; import edu.harvard.iq.dataverse.engine.command.DataverseRequest; import edu.harvard.iq.dataverse.engine.command.exception.CommandException; @@ -22,7 +21,6 @@ import edu.harvard.iq.dataverse.settings.SettingsServiceBean; import edu.harvard.iq.dataverse.util.BundleUtil; import edu.harvard.iq.dataverse.util.ConstraintViolationUtil; -import edu.harvard.iq.dataverse.util.FileUtil; import edu.harvard.iq.dataverse.util.SystemConfig; import java.io.ByteArrayInputStream; import java.io.IOException; @@ -309,7 +307,11 @@ DepositReceipt replaceOrAddFiles(String uri, Deposit deposit, AuthCredentials au try { //CreateDataFileResult createDataFilesResponse = FileUtil.createDataFiles(editVersion, deposit.getInputStream(), uploadedZipFilename, guessContentTypeForMe, null, null, systemConfig); - Command cmd = new CreateNewDataFilesCommand(dvReq, editVersion, deposit.getInputStream(), uploadedZipFilename, 
guessContentTypeForMe, null, null); + UserStorageQuota quota = null; + if (systemConfig.isStorageQuotasEnforced()) { + quota = dataFileService.getUserStorageQuota(user, dataset); + } + Command cmd = new CreateNewDataFilesCommand(dvReq, editVersion, deposit.getInputStream(), uploadedZipFilename, guessContentTypeForMe, null, quota, null); CreateDataFileResult createDataFilesResult = commandEngine.submit(cmd); dataFiles = createDataFilesResult.getDataFiles(); } catch (CommandException ex) { diff --git a/src/main/java/edu/harvard/iq/dataverse/datasetutility/AddReplaceFileHelper.java b/src/main/java/edu/harvard/iq/dataverse/datasetutility/AddReplaceFileHelper.java index 3914c4dc4cc..38a31dba82f 100644 --- a/src/main/java/edu/harvard/iq/dataverse/datasetutility/AddReplaceFileHelper.java +++ b/src/main/java/edu/harvard/iq/dataverse/datasetutility/AddReplaceFileHelper.java @@ -1212,7 +1212,11 @@ private boolean step_030_createNewFilesViaIngest(){ this.newCheckSumType, this.systemConfig);*/ - Command cmd = new CreateNewDataFilesCommand(dvRequest, workingVersion, newFileInputStream, newFileName, newFileContentType, newStorageIdentifier, newCheckSum, newCheckSumType); + DataFileServiceBean.UserStorageQuota quota = null; + if (systemConfig.isStorageQuotasEnforced()) { + quota = fileService.getUserStorageQuota(dvRequest.getAuthenticatedUser(), dataset); + } + Command cmd = new CreateNewDataFilesCommand(dvRequest, workingVersion, newFileInputStream, newFileName, newFileContentType, newStorageIdentifier, quota, newCheckSum, newCheckSumType); CreateDataFileResult createDataFilesResult = commandEngine.submit(cmd); initialFileList = createDataFilesResult.getDataFiles(); diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateNewDataFilesCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateNewDataFilesCommand.java index 2608069dcb2..b7ceddcd8bf 100644 --- 
a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateNewDataFilesCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateNewDataFilesCommand.java @@ -78,12 +78,13 @@ public class CreateNewDataFilesCommand extends AbstractCommand datafiles = new ArrayList<>(); //When there is no checksum/checksumtype being sent (normal upload, needs to be calculated), set the type to the current default @@ -132,7 +136,7 @@ public CreateDataFileResult execute(CommandContext ctxt) throws CommandException if (ctxt.systemConfig().isStorageQuotasEnforced()) { //storageQuotaLimit = ctxt.files().getClass()...; - UserStorageQuota quota = ctxt.files().getUserStorageQuota(super.getRequest().getAuthenticatedUser(), this.version.getDataset()); + //UserStorageQuota quota = ctxt.files().getUserStorageQuota(super.getRequest().getAuthenticatedUser(), this.version.getDataset()); if (quota != null) { storageQuotaLimit = quota.getRemainingQuotaInBytes(); } @@ -247,6 +251,10 @@ public CreateDataFileResult execute(CommandContext ctxt) throws CommandException } datafiles.add(datafile); + // Update quota if present + if (quota != null) { + quota.setTotalUsageInBytes(quota.getTotalUsageInBytes() - datafile.getFilesize()); + } return CreateDataFileResult.success(fileName, finalType, datafiles); } @@ -259,6 +267,7 @@ public CreateDataFileResult execute(CommandContext ctxt) throws CommandException ZipEntry zipEntry = null; int fileNumberLimit = ctxt.systemConfig().getZipUploadFilesLimit(); + Long combinedUnzippedFileSize = 0L; try { Charset charset = null; @@ -307,8 +316,7 @@ public CreateDataFileResult execute(CommandContext ctxt) throws CommandException */ int numberOfUnpackableFiles = 0; - Long combinedUnzippedFileSize = 0L; - + /** * Note that we can't just use zipFile.size(), * unfortunately, since that's the total number of entries, @@ -363,6 +371,8 @@ public CreateDataFileResult execute(CommandContext ctxt) throws CommandException // Close the ZipFile, re-open 
as ZipInputStream: zipFile.close(); + // reset: + combinedUnzippedFileSize = 0L; if (charset != null) { unZippedIn = new ZipInputStream(new FileInputStream(tempFile.toFile()), charset); @@ -458,6 +468,7 @@ public CreateDataFileResult execute(CommandContext ctxt) throws CommandException } datafiles.add(datafile); + combinedUnzippedFileSize += datafile.getFilesize(); } } } @@ -505,6 +516,10 @@ public CreateDataFileResult execute(CommandContext ctxt) throws CommandException // do nothing - it's just a temp file. logger.warning("Could not remove temp file " + tempFile.getFileName().toString()); } + // update the quota object: + if (quota != null) { + quota.setTotalUsageInBytes(quota.getTotalUsageInBytes() + combinedUnzippedFileSize); + } // and return: return CreateDataFileResult.success(fileName, finalType, datafiles); } @@ -524,9 +539,9 @@ public CreateDataFileResult execute(CommandContext ctxt) throws CommandException logger.severe("Processing of zipped shapefile failed."); return CreateDataFileResult.error(fileName, finalType); } + Long storageQuotaLimitForRezippedFiles = storageQuotaLimit; try { - Long storageQuotaLimitForRezippedFiles = storageQuotaLimit; for (File finalFile : shpIngestHelper.getFinalRezippedFiles()) { FileInputStream finalFileInputStream = new FileInputStream(finalFile); @@ -598,6 +613,10 @@ public CreateDataFileResult execute(CommandContext ctxt) throws CommandException logger.warning("Unable to delete: " + tempFile.toString() + "due to Security Exception: " + se.getMessage()); } + // update the quota object: + if (quota != null) { + quota.setTotalUsageInBytes(storageQuotaLimitForRezippedFiles); + } return CreateDataFileResult.success(fileName, finalType, datafiles); } else { logger.severe("No files added from directory of rezipped shapefiles"); @@ -665,6 +684,10 @@ public CreateDataFileResult execute(CommandContext ctxt) throws CommandException } datafiles.add(datafile); + // Update quota (may not be necessary in the context of direct upload - 
?) + if (quota != null) { + quota.setTotalUsageInBytes(quota.getTotalUsageInBytes() - datafile.getFilesize()); + } return CreateDataFileResult.success(fileName, finalType, datafiles); } From 87c7383c646de0756503cc6116e13f3d47f0ad31 Mon Sep 17 00:00:00 2001 From: Leonid Andreev Date: Thu, 11 May 2023 13:03:07 -0400 Subject: [PATCH 041/396] More fixes for the command. #9361 --- .../impl/CreateNewDataFilesCommand.java | 19 ++++++++----------- 1 file changed, 8 insertions(+), 11 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateNewDataFilesCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateNewDataFilesCommand.java index b7ceddcd8bf..6f582a4c015 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateNewDataFilesCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateNewDataFilesCommand.java @@ -118,7 +118,6 @@ public CreateNewDataFilesCommand(DataverseRequest aRequest, DatasetVersion versi @Override public CreateDataFileResult execute(CommandContext ctxt) throws CommandException { - logger.info("entering command.execute();"); List datafiles = new ArrayList<>(); //When there is no checksum/checksumtype being sent (normal upload, needs to be calculated), set the type to the current default @@ -135,8 +134,6 @@ public CreateDataFileResult execute(CommandContext ctxt) throws CommandException Long storageQuotaLimit = null; if (ctxt.systemConfig().isStorageQuotasEnforced()) { - //storageQuotaLimit = ctxt.files().getClass()...; - //UserStorageQuota quota = ctxt.files().getUserStorageQuota(super.getRequest().getAuthenticatedUser(), this.version.getDataset()); if (quota != null) { storageQuotaLimit = quota.getRemainingQuotaInBytes(); } @@ -220,9 +217,11 @@ public CreateDataFileResult execute(CommandContext ctxt) throws CommandException } DataFile datafile = null; + long fileSize = 0L; try { uncompressedIn = new GZIPInputStream(new 
FileInputStream(tempFile.toFile())); File unZippedTempFile = saveInputStreamInTempFile(uncompressedIn, fileSizeLimit, storageQuotaLimit); + fileSize = unZippedTempFile.length(); datafile = FileUtil.createSingleDataFile(version, unZippedTempFile, finalFileName, MIME_TYPE_UNDETERMINED_DEFAULT, ctxt.systemConfig().getFileFixityChecksumAlgorithm()); } catch (IOException | FileExceedsMaxSizeException | FileExceedsStorageQuotaException ioex) { // it looks like we simply skip the file silently, if its uncompressed size @@ -253,7 +252,7 @@ public CreateDataFileResult execute(CommandContext ctxt) throws CommandException datafiles.add(datafile); // Update quota if present if (quota != null) { - quota.setTotalUsageInBytes(quota.getTotalUsageInBytes() - datafile.getFilesize()); + quota.setTotalUsageInBytes(quota.getTotalUsageInBytes() + fileSize); } return CreateDataFileResult.success(fileName, finalType, datafiles); } @@ -539,7 +538,7 @@ public CreateDataFileResult execute(CommandContext ctxt) throws CommandException logger.severe("Processing of zipped shapefile failed."); return CreateDataFileResult.error(fileName, finalType); } - Long storageQuotaLimitForRezippedFiles = storageQuotaLimit; + long combinedRezippedFileSize = 0L; try { @@ -551,7 +550,7 @@ public CreateDataFileResult execute(CommandContext ctxt) throws CommandException continue; } - File unZippedShapeTempFile = saveInputStreamInTempFile(finalFileInputStream, fileSizeLimit, storageQuotaLimitForRezippedFiles); + File unZippedShapeTempFile = saveInputStreamInTempFile(finalFileInputStream, fileSizeLimit, storageQuotaLimit != null ? 
storageQuotaLimit - combinedRezippedFileSize : null); DataFile new_datafile = FileUtil.createSingleDataFile(version, unZippedShapeTempFile, finalFile.getName(), finalType, ctxt.systemConfig().getFileFixityChecksumAlgorithm()); String directoryName = null; @@ -569,10 +568,8 @@ public CreateDataFileResult execute(CommandContext ctxt) throws CommandException } if (new_datafile != null) { datafiles.add(new_datafile); + combinedRezippedFileSize += unZippedShapeTempFile.length(); // todo: can this new_datafile be null? - if (storageQuotaLimitForRezippedFiles != null) { - storageQuotaLimitForRezippedFiles = storageQuotaLimitForRezippedFiles - new_datafile.getFilesize(); - } } else { logger.severe("Could not add part of rezipped shapefile. new_datafile was null: " + finalFile.getName()); } @@ -615,7 +612,7 @@ public CreateDataFileResult execute(CommandContext ctxt) throws CommandException } // update the quota object: if (quota != null) { - quota.setTotalUsageInBytes(storageQuotaLimitForRezippedFiles); + quota.setTotalUsageInBytes(quota.getTotalUsageInBytes() + combinedRezippedFileSize); } return CreateDataFileResult.success(fileName, finalType, datafiles); } else { @@ -686,7 +683,7 @@ public CreateDataFileResult execute(CommandContext ctxt) throws CommandException // Update quota (may not be necessary in the context of direct upload - ?) 
if (quota != null) { - quota.setTotalUsageInBytes(quota.getTotalUsageInBytes() - datafile.getFilesize()); + quota.setTotalUsageInBytes(quota.getTotalUsageInBytes() + fileSize); } return CreateDataFileResult.success(fileName, finalType, datafiles); } From cd47f9389e881866a45792b5d9cf9b286c5d7fa7 Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Mon, 15 May 2023 09:30:23 +0200 Subject: [PATCH 042/396] chore(deps): update Nimbus SDK to 10.9.1 --- pom.xml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pom.xml b/pom.xml index 49443f62453..8764e4f493d 100644 --- a/pom.xml +++ b/pom.xml @@ -385,7 +385,7 @@ com.nimbusds oauth2-oidc-sdk - 10.7.1 + 10.9.1 From 94598bd66de2ee5a535cca6aab63b020ad95f65b Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Mon, 15 May 2023 10:04:39 +0200 Subject: [PATCH 043/396] feat(auth): pass OAuth2/OIDC state parameter to provider When the client is returning from the provider to us, carrying along the authorization code we need to retrieve user details, we also receive again the state. The state was generated and sent by us, and will not be altered by the provider, which makes it perfect to identify the original request we built before sending the client to the provider. Passing this state to the provider enables the provider to reuse this information. This is crucial to enable PKCE support, as we need to remember which secret code we sent to the provider - otherwise we will not be able to verify the authz code. Tests have been adapted. 
--- .../oauth2/AbstractOAuth2AuthenticationProvider.java | 3 ++- .../providers/oauth2/OAuth2LoginBackingBean.java | 2 +- .../providers/oauth2/OAuth2LoginBackingBeanTest.java | 11 ++++++++--- 3 files changed, 11 insertions(+), 5 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/AbstractOAuth2AuthenticationProvider.java b/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/AbstractOAuth2AuthenticationProvider.java index 01139cd2e27..373a295487d 100644 --- a/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/AbstractOAuth2AuthenticationProvider.java +++ b/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/AbstractOAuth2AuthenticationProvider.java @@ -139,6 +139,7 @@ public OAuth20Service getService(String callbackUrl) { * Receive user data from OAuth2 provider after authn/z has been successfull. (Callback view uses this) * Request a token and access the resource, parse output and return user details. * @param code The authz code sent from the provider + * @param state The state which was communicated between us and the provider, identifying the exact request * @param redirectUrl The redirect URL (some providers require this when fetching the access token, e. g. 
Google) * @return A user record containing all user details accessible for us * @throws IOException Thrown when communication with the provider fails @@ -146,7 +147,7 @@ public OAuth20Service getService(String callbackUrl) { * @throws InterruptedException Thrown when the requests thread is failing * @throws ExecutionException Thrown when the requests thread is failing */ - public OAuth2UserRecord getUserRecord(String code, String redirectUrl) + public OAuth2UserRecord getUserRecord(String code, String state, String redirectUrl) throws IOException, OAuth2Exception, InterruptedException, ExecutionException { OAuth20Service service = getService(redirectUrl); diff --git a/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/OAuth2LoginBackingBean.java b/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/OAuth2LoginBackingBean.java index c5be41a014a..7b52f2e9b16 100644 --- a/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/OAuth2LoginBackingBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/OAuth2LoginBackingBean.java @@ -100,7 +100,7 @@ public void exchangeCodeForToken() throws IOException { if (oIdp.isPresent() && code.isPresent()) { AbstractOAuth2AuthenticationProvider idp = oIdp.get(); - oauthUser = idp.getUserRecord(code.get(), systemConfig.getOAuth2CallbackUrl()); + oauthUser = idp.getUserRecord(code.get(), req.getParameter("state"), systemConfig.getOAuth2CallbackUrl()); // Throw an error if this authentication method is disabled: // (it's not clear if it's possible at all, for somebody to get here with diff --git a/src/test/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/OAuth2LoginBackingBeanTest.java b/src/test/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/OAuth2LoginBackingBeanTest.java index 80249cc89e8..0c54c050d79 100644 --- a/src/test/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/OAuth2LoginBackingBeanTest.java +++ 
b/src/test/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/OAuth2LoginBackingBeanTest.java @@ -105,6 +105,7 @@ class ecft { @Mock DataverseSession session; @Mock OAuth2TokenDataServiceBean oauth2Tokens; Optional redirect = Optional.of("/hellotest"); + String state; @BeforeEach void setUp() throws IOException { @@ -118,7 +119,11 @@ void setUp() throws IOException { when(externalContextMock.getRequest()).thenReturn(requestMock); lenient().when(externalContextMock.getFlash()).thenReturn(flashMock); lenient().when(requestMock.getReader()).thenReturn(reader); - doReturn(loginBackingBean.createState(testIdp, this.redirect)).when(requestMock).getParameter("state"); + + // Save the state as we need it for injection (necessary because of PKCE support) + state = loginBackingBean.createState(testIdp, this.redirect); + doReturn(state).when(requestMock).getParameter("state"); + // travel in time at least 10 milliseconds (remote calls & redirects are much likely longer) // (if not doing this tests become flaky on fast machinas) loginBackingBean.clock = Clock.offset(constantClock, Duration.ofMillis(10)); @@ -140,7 +145,7 @@ void newUser() throws Exception { // fake the code received from the provider when(requestMock.getParameter("code")).thenReturn(code); // let's deep-fake the result of getUserRecord() - doReturn(userRecord).when(testIdp).getUserRecord(code, null); + doReturn(userRecord).when(testIdp).getUserRecord(code, state, null); // WHEN (& then) // capture the redirect target from the faces context @@ -168,7 +173,7 @@ void existingUser() throws Exception { // fake the code received from the provider when(requestMock.getParameter("code")).thenReturn(code); // let's deep-fake the result of getUserRecord() - doReturn(userRecord).when(testIdp).getUserRecord(code, null); + doReturn(userRecord).when(testIdp).getUserRecord(code, state, null); doReturn(tokenData).when(userRecord).getTokenData(); // also fake the result of the lookup in the auth service 
doReturn(userIdentifier).when(userRecord).getUserRecordIdentifier(); From 5fbee2e067722e7ff649dd30ae3e8afa90851958 Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Mon, 15 May 2023 10:05:03 +0200 Subject: [PATCH 044/396] feat(auth): add OIDC PKCE settings to JvmSettings --- .../java/edu/harvard/iq/dataverse/settings/JvmSettings.java | 3 +++ 1 file changed, 3 insertions(+) diff --git a/src/main/java/edu/harvard/iq/dataverse/settings/JvmSettings.java b/src/main/java/edu/harvard/iq/dataverse/settings/JvmSettings.java index dc9267805e6..ba90d895ae9 100644 --- a/src/main/java/edu/harvard/iq/dataverse/settings/JvmSettings.java +++ b/src/main/java/edu/harvard/iq/dataverse/settings/JvmSettings.java @@ -121,6 +121,9 @@ public enum JvmSettings { OIDC_AUTH_SERVER_URL(SCOPE_OIDC, "auth-server-url"), OIDC_CLIENT_ID(SCOPE_OIDC, "client-id"), OIDC_CLIENT_SECRET(SCOPE_OIDC, "client-secret"), + SCOPE_OIDC_PKCE(SCOPE_OIDC, "pkce"), + OIDC_PKCE_ENABLED(SCOPE_OIDC_PKCE, "enabled"), + OIDC_PKCE_METHOD(SCOPE_OIDC_PKCE, "method"), ; From c0d21cc9f935b3dc43653baa3d132681ff94c1c0 Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Mon, 15 May 2023 10:07:35 +0200 Subject: [PATCH 045/396] feat(auth): enable PKCE usage in OIDC provider - Adding PKCE parameters to constructor - Adding a hashmap to cache the code verifiers mapped by the unique state we generate - Enabling the actual workflow of PKCE --- .../oauth2/oidc/OIDCAuthProvider.java | 38 ++++++++++++++++--- 1 file changed, 33 insertions(+), 5 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/oidc/OIDCAuthProvider.java b/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/oidc/OIDCAuthProvider.java index 4b6c575cfaf..91d552618ef 100644 --- a/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/oidc/OIDCAuthProvider.java +++ b/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/oidc/OIDCAuthProvider.java @@ -18,6 +18,8 @@ import 
com.nimbusds.oauth2.sdk.id.ClientID; import com.nimbusds.oauth2.sdk.id.Issuer; import com.nimbusds.oauth2.sdk.id.State; +import com.nimbusds.oauth2.sdk.pkce.CodeChallengeMethod; +import com.nimbusds.oauth2.sdk.pkce.CodeVerifier; import com.nimbusds.oauth2.sdk.token.BearerAccessToken; import com.nimbusds.openid.connect.sdk.AuthenticationRequest; import com.nimbusds.openid.connect.sdk.Nonce; @@ -39,7 +41,9 @@ import java.net.URI; import java.util.Arrays; import java.util.List; +import java.util.Map; import java.util.Optional; +import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ExecutionException; import java.util.logging.Logger; @@ -57,12 +61,25 @@ public class OIDCAuthProvider extends AbstractOAuth2AuthenticationProvider { final Issuer issuer; final ClientAuthentication clientAuth; final OIDCProviderMetadata idpMetadata; + final boolean pkceEnabled; + final CodeChallengeMethod pkceMethod; - public OIDCAuthProvider(String aClientId, String aClientSecret, String issuerEndpointURL) throws AuthorizationSetupException { + /** + * Using PKCE, we create and send a special {@link CodeVerifier}. This contains a secret + * we need again when verifying the response by the provider, thus the cache. 
+ */ + private final Map verifierCache = new ConcurrentHashMap<>(); + + public OIDCAuthProvider(String aClientId, String aClientSecret, String issuerEndpointURL, + boolean pkceEnabled, String pkceMethod) throws AuthorizationSetupException { this.clientSecret = aClientSecret; // nedded for state creation this.clientAuth = new ClientSecretBasic(new ClientID(aClientId), new Secret(aClientSecret)); this.issuer = new Issuer(issuerEndpointURL); + this.idpMetadata = getMetadata(); + + this.pkceEnabled = pkceEnabled; + this.pkceMethod = CodeChallengeMethod.parse(pkceMethod); } /** @@ -147,6 +164,7 @@ public String buildAuthzUrl(String state, String callbackUrl) { State stateObject = new State(state); URI callback = URI.create(callbackUrl); Nonce nonce = new Nonce(); + CodeVerifier pkceVerifier = pkceEnabled ? new CodeVerifier() : null; AuthenticationRequest req = new AuthenticationRequest.Builder(new ResponseType("code"), Scope.parse(this.scope), @@ -154,9 +172,15 @@ public String buildAuthzUrl(String state, String callbackUrl) { callback) .endpointURI(idpMetadata.getAuthorizationEndpointURI()) .state(stateObject) + // Called method is nullsafe - will disable sending a PKCE challenge in case the verifier is not present + .codeChallenge(pkceVerifier, pkceMethod) .nonce(nonce) .build(); + // Cache the PKCE verifier, as we need the secret in it for verification later again, after the client sends us + // the auth code! We use the state to cache the verifier, as the state is unique per authentication event. 
+ this.verifierCache.put(state, pkceVerifier); + return req.toURI().toString(); } @@ -172,10 +196,14 @@ public String buildAuthzUrl(String state, String callbackUrl) { * @throws ExecutionException Thrown when the requests thread is failing */ @Override - public OAuth2UserRecord getUserRecord(String code, String redirectUrl) - throws IOException, OAuth2Exception, InterruptedException, ExecutionException { - // Create grant object - AuthorizationGrant codeGrant = new AuthorizationCodeGrant(new AuthorizationCode(code), URI.create(redirectUrl)); + public OAuth2UserRecord getUserRecord(String code, String state, String redirectUrl) throws IOException, OAuth2Exception { + // Retrieve the verifier from the cache and clear from the cache. If not found, will be null. + // Will be sent to token endpoint for verification, so if required but missing, will lead to exception. + CodeVerifier verifier = verifierCache.remove(state); + + // Create grant object - again, this is null-safe for the verifier + AuthorizationGrant codeGrant = new AuthorizationCodeGrant( + new AuthorizationCode(code), URI.create(redirectUrl), verifier); // Get Access Token first Optional accessToken = getAccessToken(codeGrant); From ef4d192156bd7efd6e83226c57dd7deea545a6e2 Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Mon, 15 May 2023 10:08:37 +0200 Subject: [PATCH 046/396] feat(auth): extend OIDC provider factory to understand PKCE parameters To enable backward compatibility, default to disabled and method S256. 
--- .../oauth2/oidc/OIDCAuthenticationProviderFactory.java | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/oidc/OIDCAuthenticationProviderFactory.java b/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/oidc/OIDCAuthenticationProviderFactory.java index 89cf1cb986d..3f8c18d0567 100644 --- a/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/oidc/OIDCAuthenticationProviderFactory.java +++ b/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/oidc/OIDCAuthenticationProviderFactory.java @@ -41,7 +41,9 @@ public AuthenticationProvider buildProvider( AuthenticationProviderRow aRow ) th OIDCAuthProvider oidc = new OIDCAuthProvider( factoryData.get("clientId"), factoryData.get("clientSecret"), - factoryData.get("issuer") + factoryData.get("issuer"), + Boolean.parseBoolean(factoryData.getOrDefault("pkceEnabled", "false")), + factoryData.getOrDefault("pkceMethod", "S256") ); oidc.setId(aRow.getId()); @@ -60,7 +62,9 @@ public static AuthenticationProvider buildFromSettings() throws AuthorizationSet OIDCAuthProvider oidc = new OIDCAuthProvider( JvmSettings.OIDC_CLIENT_ID.lookup(), JvmSettings.OIDC_CLIENT_SECRET.lookup(), - JvmSettings.OIDC_AUTH_SERVER_URL.lookup() + JvmSettings.OIDC_AUTH_SERVER_URL.lookup(), + JvmSettings.OIDC_PKCE_ENABLED.lookupOptional(Boolean.class).orElse(false), + JvmSettings.OIDC_PKCE_METHOD.lookupOptional().orElse("S256") ); oidc.setId("oidc-mpconfig"); From 37bcc3a69930879810c7a7eb87f465219a00a24d Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Mon, 15 May 2023 10:28:32 +0200 Subject: [PATCH 047/396] doc(auth): add OIDC PKCE configuration --- .../source/installation/oidc.rst | 34 ++++++++++++++++++- 1 file changed, 33 insertions(+), 1 deletion(-) diff --git a/doc/sphinx-guides/source/installation/oidc.rst b/doc/sphinx-guides/source/installation/oidc.rst index fbcbd3eb4ad..9848d73b189 100644 --- 
a/doc/sphinx-guides/source/installation/oidc.rst +++ b/doc/sphinx-guides/source/installation/oidc.rst @@ -69,9 +69,32 @@ After adding a provider, the Log In page will by default show the "builtin" prov In contrast to our :doc:`oauth2`, you can use multiple providers by creating distinct configurations enabled by the same technology and without modifying the Dataverse Software code base (standards for the win!). + +.. _oidc-pkce: + +Enabling PKCE Security +^^^^^^^^^^^^^^^^^^^^^^ + +Many providers these days support or even require the usage of `PKCE `_ to safeguard against +some attacks and enable public clients that cannot have a secure secret to still use OpenID Connect (or OAuth2). + +The Dataverse-built OIDC client can be enabled to use PKCE, and you can configure which method is used when creating the code challenge. +See also `this explanation of the flow `_ +for details on how this works. + +As we are using the `Nimbus SDK `_ as our client +library, we support the standard ``PLAIN`` and ``S256`` code challenge methods. "SHA-256 method" is the default +as recommended in `RFC7636 `_. If your provider needs some +other method (unlikely), please open an issue. + +The examples in the provisioning sections below contain the parameters you may use to configure PKCE. + Provision via REST API ^^^^^^^^^^^^^^^^^^^^^^ +Note: you may omit the PKCE-related settings from ``factoryData`` below if you don't plan on using PKCE - default is +disabled. + Please create a ``my-oidc-provider.json`` file like this, replacing every ``<...>`` with your values: .. code-block:: json @@ -81,7 +104,7 @@ Please create a ``my-oidc-provider.json`` file like this, replacing every ``<... "factoryAlias":"oidc", "title":"", "subtitle":"", - "factoryData":"type: oidc | issuer: | clientId: | clientSecret: ", + "factoryData":"type: oidc | issuer: | clientId: | clientSecret: | pkceEnabled: | pkceMethod: ", "enabled":true } @@ -105,6 +128,7 @@ The following options are available: ..
list-table:: :widths: 25 55 10 10 :header-rows: 1 + :align: left * - Option - Description @@ -126,6 +150,14 @@ The following options are available: - The base URL of the OpenID Connect (OIDC) server as explained above. - Y - \- + * - ``dataverse.auth.oidc.pkce.enabled`` + - Set to ``true`` to enable :ref:`PKCE ` in auth flow. + - N + - ``false`` + * - ``dataverse.auth.oidc.pkce.method`` + - Set code challenge method. Default equals best practice. + - N + - ``S256`` * - ``dataverse.auth.oidc.title`` - The UI visible name for this provider in login options. - N From 5ed66e6bb481bd94c81965b4032e642a058943e4 Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Mon, 15 May 2023 12:20:20 +0200 Subject: [PATCH 048/396] chore(deps): update to Testcontainers 1.18.1 --- modules/dataverse-parent/pom.xml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/modules/dataverse-parent/pom.xml b/modules/dataverse-parent/pom.xml index 7f8f73e049a..060fc22b4d2 100644 --- a/modules/dataverse-parent/pom.xml +++ b/modules/dataverse-parent/pom.xml @@ -167,7 +167,7 @@ 5.0.0 - 1.17.6 + 1.18.1 2.10.1 4.13.1 From 07c70d8933d550eb01f3bd2cfb3626b9a2d94f8c Mon Sep 17 00:00:00 2001 From: Jim Myers Date: Tue, 6 Dec 2022 13:01:01 -0500 Subject: [PATCH 049/396] flyway update script --- .../resources/db/migration/V5.13.0.3__guestbook-on-request.sql | 1 + 1 file changed, 1 insertion(+) create mode 100644 src/main/resources/db/migration/V5.13.0.3__guestbook-on-request.sql diff --git a/src/main/resources/db/migration/V5.13.0.3__guestbook-on-request.sql b/src/main/resources/db/migration/V5.13.0.3__guestbook-on-request.sql new file mode 100644 index 00000000000..1ffc87dfa32 --- /dev/null +++ b/src/main/resources/db/migration/V5.13.0.3__guestbook-on-request.sql @@ -0,0 +1 @@ +ALTER TABLE fileaccessrequests ADD COLUMN IF NOT EXISTS request_state VARCHAR(64); From c8726958e45d6e25bc4ca773c55ce58afc1bf431 Mon Sep 17 00:00:00 2001 From: Jim Myers Date: Tue, 6 Dec 2022 13:56:33 -0500 Subject: 
[PATCH 050/396] fix duplicate action --- src/main/webapp/filesFragment.xhtml | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/src/main/webapp/filesFragment.xhtml b/src/main/webapp/filesFragment.xhtml index 6122b86b274..7395998042f 100644 --- a/src/main/webapp/filesFragment.xhtml +++ b/src/main/webapp/filesFragment.xhtml @@ -483,7 +483,8 @@ styleClass="btn btn-default btn-request" action="#{DatasetPage.requestAccessMultipleFiles()}" update="@form, @([id$=messagePanel])" - disabled="#{DatasetPage.locked}"> + disabled="#{DatasetPage.locked or !DatasetPage.fileAccessRequestMultiButtonEnabled}"> + #{bundle['file.requestAccess']} Date: Tue, 6 Dec 2022 13:57:13 -0500 Subject: [PATCH 051/396] add comment to check possible unused method --- .../edu/harvard/iq/dataverse/DatasetPage.java | 62 +++++++++++++++++++ 1 file changed, 62 insertions(+) diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java b/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java index 9294620d790..33b598083da 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java @@ -3209,6 +3209,68 @@ private void updateGuestbookResponse (boolean guestbookRequired, boolean downloa } + /*helper function to filter the selected files into , + and and for reuse*/ + + private boolean filterSelectedFiles(){ + setSelectedDownloadableFiles(new ArrayList<>()); + setSelectedNonDownloadableFiles(new ArrayList<>()); + setSelectedRestrictedFiles(new ArrayList<>()); + setSelectedUnrestrictedFiles(new ArrayList<>()); + + boolean someFiles = false; + for (FileMetadata fmd : this.selectedFiles){ + if(this.fileDownloadHelper.canDownloadFile(fmd)){ + getSelectedDownloadableFiles().add(fmd); + someFiles=true; + } else { + getSelectedNonDownloadableFiles().add(fmd); + } + if(fmd.isRestricted()){ + getSelectedRestrictedFiles().add(fmd); //might be downloadable to user or not + someFiles=true; + } else { + 
getSelectedUnrestrictedFiles().add(fmd); + someFiles=true; + } + + } + return someFiles; + } +//QDRADA - still needed? + public void validateFilesForRequestAccess(){ + this.filterSelectedFiles(); + + if(!dataset.isFileAccessRequest()){ //is this needed? wouldn't be able to click Request Access if this !isFileAccessRequest() + return; + } + + if(!this.selectedRestrictedFiles.isEmpty()){ + ArrayList nonDownloadableRestrictedFiles = new ArrayList<>(); + + List userRequestedDataFiles = ((AuthenticatedUser) session.getUser()).getRequestedDataFiles(); + + for(FileMetadata fmd : this.selectedRestrictedFiles){ + if(!this.fileDownloadHelper.canDownloadFile(fmd) && !userRequestedDataFiles.contains(fmd.getDataFile())){ + nonDownloadableRestrictedFiles.add(fmd); + } + } + + if(!nonDownloadableRestrictedFiles.isEmpty()){ + guestbookResponse.setDataFile(null); + guestbookResponse.setSelectedFileIds(this.getFilesIdsString(nonDownloadableRestrictedFiles)); + + if(this.isGuestbookAndTermsPopupRequired()){ //need to pop up the guestbook and terms dialog + PrimeFaces.current().executeScript("PF('guestbookAndTermsPopup').show();handleResizeDialog('guestbookAndTermsPopup');"); + } else { + this.requestAccessMultipleFiles(); + } + } else { + //popup select data files + } + } + } + private boolean selectAllFiles; public boolean isSelectAllFiles() { From 76b3b181d449336a9659bf5dc54637be90ff8679 Mon Sep 17 00:00:00 2001 From: Jim Myers Date: Tue, 6 Dec 2022 13:57:30 -0500 Subject: [PATCH 052/396] debug logging --- .../iq/dataverse/engine/command/impl/RequestAccessCommand.java | 2 ++ 1 file changed, 2 insertions(+) diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/RequestAccessCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/RequestAccessCommand.java index b87b9a73aa5..df7c7367f2d 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/RequestAccessCommand.java +++ 
b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/RequestAccessCommand.java @@ -52,6 +52,8 @@ public DataFile execute(CommandContext ctxt) throws CommandException { } //if user already has permission to download file or the file is public throw command exception + logger.info("User: " + this.getRequest().getAuthenticatedUser().getName()); + logger.info("File: " + file.getId()); if (!file.isRestricted() || ctxt.permissions().requestOn(this.getRequest(), file).has(Permission.DownloadFile)) { throw new CommandException(BundleUtil.getStringFromBundle("file.requestAccess.notAllowed.alreadyHasDownloadPermisssion"), this); } From bd603ec7a803542460eea3a66600572bdf85f57a Mon Sep 17 00:00:00 2001 From: Jim Myers Date: Tue, 6 Dec 2022 14:17:45 -0500 Subject: [PATCH 053/396] more debug --- .../command/impl/RequestAccessCommand.java | 16 +++++++++++++++- 1 file changed, 15 insertions(+), 1 deletion(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/RequestAccessCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/RequestAccessCommand.java index df7c7367f2d..d710ed66551 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/RequestAccessCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/RequestAccessCommand.java @@ -44,6 +44,19 @@ public RequestAccessCommand(DataverseRequest dvRequest, DataFile file, Boolean s this.sendNotification = sendNotification; } + + public RequestAccessCommand(DataverseRequest dvRequest, DataFile file, GuestbookResponse gbr) { + this(dvRequest, file, gbr, false); + } + + public RequestAccessCommand(DataverseRequest dvRequest, DataFile file, GuestbookResponse gbr, Boolean sendNotification) { + // for data file check permission on owning dataset + super(dvRequest, file); + this.file = file; + this.requester = (AuthenticatedUser) dvRequest.getUser(); + this.fileAccessRequest = new FileAccessRequest(file,requester,gbr); + this.sendNotification = 
sendNotification; + } @Override public DataFile execute(CommandContext ctxt) throws CommandException { @@ -53,7 +66,8 @@ public DataFile execute(CommandContext ctxt) throws CommandException { //if user already has permission to download file or the file is public throw command exception logger.info("User: " + this.getRequest().getAuthenticatedUser().getName()); - logger.info("File: " + file.getId()); + logger.info("File: " + file.getId() + " : restricted?: " + file.isRestricted()); + logger.info("permission?: " + ctxt.permissions().requestOn(this.getRequest(), file).has(Permission.DownloadFile)); if (!file.isRestricted() || ctxt.permissions().requestOn(this.getRequest(), file).has(Permission.DownloadFile)) { throw new CommandException(BundleUtil.getStringFromBundle("file.requestAccess.notAllowed.alreadyHasDownloadPermisssion"), this); } From 5e29a0600b6d4fe59d89191897bc61cd45d78494 Mon Sep 17 00:00:00 2001 From: Jim Myers Date: Tue, 6 Dec 2022 10:20:39 -0500 Subject: [PATCH 054/396] more debug --- .../edu/harvard/iq/dataverse/DataFile.java | 11 + .../iq/dataverse/DataFileServiceBean.java | 53 ++++ .../edu/harvard/iq/dataverse/Dataset.java | 6 + .../edu/harvard/iq/dataverse/DatasetPage.java | 134 +++++++-- .../iq/dataverse/FileAccessRequest.java | 262 ++++++++++++++---- .../FileAccessRequestServiceBean.java | 89 ++++++ .../iq/dataverse/FileDownloadHelper.java | 30 +- .../iq/dataverse/FileDownloadServiceBean.java | 102 ++++++- .../edu/harvard/iq/dataverse/FilePage.java | 6 + .../iq/dataverse/GuestbookResponse.java | 13 +- .../iq/dataverse/ManagePermissionsPage.java | 47 ++++ .../iq/dataverse/UserNotification.java | 2 +- .../UserNotificationServiceBean.java | 22 ++ .../users/AuthenticatedUser.java | 25 ++ .../command/impl/RequestAccessCommand.java | 33 ++- .../harvard/iq/dataverse/util/FileUtil.java | 66 +++++ .../harvard/iq/dataverse/util/MailUtil.java | 2 + src/main/webapp/dataset.xhtml | 24 +- .../file-download-button-fragment.xhtml | 74 ++--- 
src/main/webapp/file.xhtml | 40 ++- src/main/webapp/filesFragment.xhtml | 9 +- ...l => guestbook-terms-popup-fragment.xhtml} | 17 ++ .../iq/dataverse/util/FileUtilTest.java | 2 +- 23 files changed, 896 insertions(+), 173 deletions(-) create mode 100644 src/main/java/edu/harvard/iq/dataverse/FileAccessRequestServiceBean.java rename src/main/webapp/{file-request-access-popup-fragment.xhtml => guestbook-terms-popup-fragment.xhtml} (65%) diff --git a/src/main/java/edu/harvard/iq/dataverse/DataFile.java b/src/main/java/edu/harvard/iq/dataverse/DataFile.java index 28d814d9844..c43800c57ce 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DataFile.java +++ b/src/main/java/edu/harvard/iq/dataverse/DataFile.java @@ -200,6 +200,17 @@ public String toString() { @OneToMany(mappedBy="dataFile", cascade={CascadeType.REMOVE, CascadeType.MERGE, CascadeType.PERSIST}) private List guestbookResponses; + @OneToMany(mappedBy="dataFile",fetch = FetchType.LAZY,cascade={CascadeType.REMOVE, CascadeType.MERGE, CascadeType.PERSIST, CascadeType.REFRESH}) + private List fileAccessRequests; + + public List getFileAccessRequests(){ + return fileAccessRequests; + } + + public void setFileAccessRequests(List fARs){ + this.fileAccessRequests = fARs; + } + public List getGuestbookResponses() { return guestbookResponses; } diff --git a/src/main/java/edu/harvard/iq/dataverse/DataFileServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DataFileServiceBean.java index 196f84b6877..449e8d351c6 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DataFileServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/DataFileServiceBean.java @@ -154,6 +154,27 @@ public DataFile find(Object pk) { }*/ + public List findAll(List fileIds){ + List dataFiles = new ArrayList<>(); + + for (Long fileId : fileIds){ + dataFiles.add(find(fileId)); + } + + return dataFiles; + } + + public List findAll(String fileIdsAsString){ + ArrayList dataFileIds = new ArrayList<>(); + + String[] fileIds = 
fileIdsAsString.split(","); + for (String fId : fileIds){ + dataFileIds.add(Long.parseLong(fId)); + } + + return findAll(dataFileIds); + } + public DataFile findByGlobalId(String globalId) { return (DataFile) dvObjectService.findByGlobalId(globalId, DvObject.DType.DataFile); } @@ -350,6 +371,18 @@ public FileMetadata findMostRecentVersionFileIsIn(DataFile file) { return fileMetadatas.get(0); } } + + public List findAllCheapAndEasy(String fileIdsAsString){ + //assumption is that the fileIds are separated by ',' + ArrayList dataFilesFound = new ArrayList<>(); + String[] fileIds = fileIdsAsString.split(","); + DataFile df = this.findCheapAndEasy(Long.parseLong(fileIds[0])); + if(df != null){ + dataFilesFound.add(df); + } + + return dataFilesFound; + } public DataFile findCheapAndEasy(Long id) { DataFile dataFile; @@ -802,6 +835,7 @@ public void findFileMetadataOptimizedExperimental(Dataset owner, DatasetVersion dataFile.addFileAccessRequester(au); } + dataFile.setFileAccessRequesters(retrieveFileAccessRequesters(dataFile)); dataFiles.add(dataFile); filesMap.put(dataFile.getId(), i++); } @@ -821,6 +855,25 @@ public void findFileMetadataOptimizedExperimental(Dataset owner, DatasetVersion owner.setFiles(dataFiles); } + private List retrieveFileAccessRequesters(DataFile fileIn) { + List retList = new ArrayList<>(); + + // List requesters = em.createNativeQuery("select authenticated_user_id + // from fileaccessrequests where datafile_id = + // "+fileIn.getId()).getResultList(); + List requesters = em.createNativeQuery("select authenticated_user_id from fileaccessrequests where datafile_id = " + fileIn.getId() + " and request_state='CREATED'").getResultList(); + + for (Object userIdObj : requesters) { + Long userId = (Long) userIdObj; + AuthenticatedUser user = userService.find(userId); + if (user != null) { + retList.add(user); + } + } + + return retList; + } + private List retrieveFileMetadataForVersion(Dataset dataset, DatasetVersion version, List dataFiles, Map 
filesMap, Map categoryMap) { List retList = new ArrayList<>(); Map> categoryMetaMap = new HashMap<>(); diff --git a/src/main/java/edu/harvard/iq/dataverse/Dataset.java b/src/main/java/edu/harvard/iq/dataverse/Dataset.java index 683b6687c8b..305e9a404e7 100644 --- a/src/main/java/edu/harvard/iq/dataverse/Dataset.java +++ b/src/main/java/edu/harvard/iq/dataverse/Dataset.java @@ -858,6 +858,12 @@ public String getHarvestingDescription() { return null; } + public boolean hasEnabledGuestbook(){ + Guestbook gb = this.getGuestbook(); + + return ( gb != null && gb.isEnabled()); + } + @Override public boolean equals(Object object) { // TODO: Warning - this method won't work in the case the id fields are not set diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java b/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java index 33b598083da..8754c26a3b6 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java @@ -366,6 +366,19 @@ public void setShowIngestSuccess(boolean showIngestSuccess) { this.showIngestSuccess = showIngestSuccess; } + private String termsGuestbookPopupAction = ""; + + public void setTermsGuestbookPopupAction(String popupAction){ + if(popupAction != null && popupAction.length() > 0){ + this.termsGuestbookPopupAction = popupAction; + } + + } + + public String getTermsGuestbookPopupAction(){ + return termsGuestbookPopupAction; + } + // TODO: Consider renaming "configureTools" to "fileConfigureTools". List configureTools = new ArrayList<>(); // TODO: Consider renaming "exploreTools" to "fileExploreTools". 
@@ -3137,9 +3150,14 @@ public void setValidateFilesOutcome(String validateFilesOutcome) { this.validateFilesOutcome = validateFilesOutcome; } - public boolean validateFilesForDownload(boolean guestbookRequired, boolean downloadOriginal) { - setSelectedDownloadableFiles(new ArrayList<>()); - setSelectedNonDownloadableFiles(new ArrayList<>()); + public boolean validateFilesForDownload(boolean downloadOriginal){ + if (this.selectedFiles.isEmpty()) { + PrimeFaces.current().executeScript("PF('selectFilesForDownload').show()"); + return false; + } else { + this.filterSelectedFiles(); + } + //assume Pass unless something bad happens setValidateFilesOutcome("Pass"); Long bytes = (long) 0; @@ -3170,6 +3188,13 @@ public boolean validateFilesForDownload(boolean guestbookRequired, boolean downl return false; } + +//QDRADA handle new state from + /*if (isTermsPopupRequired() || isGuestbookPopupRequiredAtDownload()){ + PrimeFaces.current().executeScript("PF('guestbookAndTermsPopup').show();handleResizeDialog('guestbookAndTermsPopup');"); + } + */ + // If some of the files were restricted and we had to drop them off the // list, and NONE of the files are left on the downloadable list // - we show them a "you're out of luck" popup: @@ -3183,10 +3208,11 @@ public boolean validateFilesForDownload(boolean guestbookRequired, boolean downl return true; } - if (guestbookRequired) { + //QDRADA - still needed? 
+/* if (guestbookRequired) { setValidateFilesOutcome("GuestbookRequired"); } - +*/ return true; } @@ -3208,6 +3234,67 @@ private void updateGuestbookResponse (boolean guestbookRequired, boolean downloa guestbookResponse.setDownloadtype("Download"); } + /*helper function to filter the selected files into , + and and for reuse*/ + + private boolean filterSelectedFiles(){ + setSelectedDownloadableFiles(new ArrayList<>()); + setSelectedNonDownloadableFiles(new ArrayList<>()); + setSelectedRestrictedFiles(new ArrayList<>()); + setSelectedUnrestrictedFiles(new ArrayList<>()); + + boolean someFiles = false; + for (FileMetadata fmd : this.selectedFiles){ + if(this.fileDownloadHelper.canDownloadFile(fmd)){ + getSelectedDownloadableFiles().add(fmd); + someFiles=true; + } else { + getSelectedNonDownloadableFiles().add(fmd); + } + if(fmd.isRestricted()){ + getSelectedRestrictedFiles().add(fmd); //might be downloadable to user or not + someFiles=true; + } else { + getSelectedUnrestrictedFiles().add(fmd); + someFiles=true; + } + + } + return someFiles; + } + + public void validateFilesForRequestAccess(){ + this.filterSelectedFiles(); + + if(!dataset.isFileAccessRequest()){ //is this needed? 
wouldn't be able to click Request Access if this !isFileAccessRequest() + return; + } + + if(!this.selectedRestrictedFiles.isEmpty()){ + ArrayList nonDownloadableRestrictedFiles = new ArrayList<>(); + + List userRequestedDataFiles = ((AuthenticatedUser) session.getUser()).getRequestedDataFiles(); + + for(FileMetadata fmd : this.selectedRestrictedFiles){ + if(!this.fileDownloadHelper.canDownloadFile(fmd) && !userRequestedDataFiles.contains(fmd.getDataFile())){ + nonDownloadableRestrictedFiles.add(fmd); + } + } + + if(!nonDownloadableRestrictedFiles.isEmpty()){ + guestbookResponse.setDataFile(null); + guestbookResponse.setSelectedFileIds(this.getFilesIdsString(nonDownloadableRestrictedFiles)); + + if(this.isGuestbookAndTermsPopupRequired()){ //need to pop up the guestbook and terms dialog + PrimeFaces.current().executeScript("PF('guestbookAndTermsPopup').show();handleResizeDialog('guestbookAndTermsPopup');"); + } else { + this.requestAccessMultipleFiles(); + } + } else { + //popup select data files + } + } + } /*helper function to filter the selected files into , and and for reuse*/ @@ -3295,26 +3382,23 @@ public void toggleAllSelected(){ // helper Method public String getSelectedFilesIdsString() { - String downloadIdString = ""; - for (FileMetadata fmd : this.selectedFiles){ - if (!StringUtil.isEmpty(downloadIdString)) { - downloadIdString += ","; - } - downloadIdString += fmd.getDataFile().getId(); - } - return downloadIdString; + return this.getFilesIdsString(this.selectedFiles); } - + // helper Method public String getSelectedDownloadableFilesIdsString() { - String downloadIdString = ""; - for (FileMetadata fmd : this.selectedDownloadableFiles){ - if (!StringUtil.isEmpty(downloadIdString)) { - downloadIdString += ","; + return this.getFilesIdsString(this.selectedDownloadableFiles); + } + + public String getFilesIdsString(List fileMetadatas){ //for reuse + String idString = ""; + for (FileMetadata fmd : fileMetadatas){ + if (!StringUtil.isEmpty(idString)) { + 
idString += ","; } - downloadIdString += fmd.getDataFile().getId(); + idString += fmd.getDataFile().getId(); } - return downloadIdString; + return idString; } @@ -5221,6 +5305,10 @@ public boolean isDownloadPopupRequired() { public boolean isRequestAccessPopupRequired() { return FileUtil.isRequestAccessPopupRequired(workingVersion); } + + public boolean isGuestbookAndTermsPopupRequired() { + return FileUtil.isGuestbookAndTermsPopupRequired(workingVersion); + } public String requestAccessMultipleFiles() { @@ -5236,11 +5324,11 @@ public String requestAccessMultipleFiles() { for (FileMetadata fmd : selectedFiles){ fileDownloadHelper.addMultipleFilesForRequestAccess(fmd.getDataFile()); } - if (isRequestAccessPopupRequired()) { + if (isGuestbookAndTermsPopupRequired()) { //RequestContext requestContext = RequestContext.getCurrentInstance(); - PrimeFaces.current().executeScript("PF('requestAccessPopup').show()"); + PrimeFaces.current().executeScript("PF('guestbookAndTermsPopup').show()"); //the popup will call writeGuestbookAndRequestAccess(); return ""; - } else { + }else { //No popup required fileDownloadHelper.requestAccessIndirect(); return ""; diff --git a/src/main/java/edu/harvard/iq/dataverse/FileAccessRequest.java b/src/main/java/edu/harvard/iq/dataverse/FileAccessRequest.java index 76c5df4409a..723a54c8587 100644 --- a/src/main/java/edu/harvard/iq/dataverse/FileAccessRequest.java +++ b/src/main/java/edu/harvard/iq/dataverse/FileAccessRequest.java @@ -1,91 +1,237 @@ package edu.harvard.iq.dataverse; -import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser; - -import javax.persistence.Column; -import javax.persistence.Embeddable; -import javax.persistence.EmbeddedId; +import java.io.Serializable; +import javax.persistence.CascadeType; +import javax.persistence.GeneratedValue; +import javax.persistence.UniqueConstraint; import javax.persistence.Entity; +import javax.persistence.Table; +import javax.persistence.Index; +import javax.persistence.Id; 
import javax.persistence.JoinColumn; +import javax.persistence.JoinTable; import javax.persistence.ManyToOne; -import javax.persistence.MapsId; -import javax.persistence.Table; -import javax.persistence.Temporal; -import javax.persistence.TemporalType; -import java.io.Serializable; -import java.util.Date; +import javax.persistence.OneToOne; +import javax.persistence.EnumType; +import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser; +import edu.harvard.iq.dataverse.datavariable.DataVariable; +import javax.persistence.Column; +import javax.persistence.Enumerated; +import javax.persistence.GenerationType; +import javax.persistence.NamedQueries; +import javax.persistence.NamedQuery; + +/** + * + * @author Marina + */ @Entity -@Table(name = "fileaccessrequests") -public class FileAccessRequest { - @EmbeddedId - private FileAccessRequestKey id; +@Table(name = "fileaccessrequests", //having added the guestbookresponse_id column to fileaccessrequests + uniqueConstraints=@UniqueConstraint(columnNames={"datafile_id", "authenticated_user_id","request_state"}) //this may not make sense at some future point +) + +@NamedQueries({ + @NamedQuery(name = "FileAccessRequest.findByAuthenticatedUserId", + query = "SELECT far FROM FileAccessRequest far WHERE far.user.id=:authenticatedUserId"), + @NamedQuery(name = "FileAccessRequest.findByGuestbookResponseId", + query = "SELECT far FROM FileAccessRequest far WHERE far.guestbookResponse.id=:guestbookResponseId"), + @NamedQuery(name = "FileAccessRequest.findByDataFileId", + query = "SELECT far FROM FileAccessRequest far WHERE far.dataFile.id=:dataFileId"), + @NamedQuery(name = "FileAccessRequest.findByRequestState", + query = "SELECT far FROM FileAccessRequest far WHERE far.requestState=:requestState"), + @NamedQuery(name = "FileAccessRequest.findByAuthenticatedUserIdAndRequestState", + query = "SELECT far FROM FileAccessRequest far WHERE far.user.id=:authenticatedUserId and far.requestState=:requestState"), + 
@NamedQuery(name = "FileAccessRequest.findByGuestbookResponseIdAndRequestState", + query = "SELECT far FROM FileAccessRequest far WHERE far.guestbookResponse.id=:guestbookResponseId and far.requestState=:requestState"), + @NamedQuery(name = "FileAccessRequest.findByDataFileIdAndRequestState", + query = "SELECT far FROM FileAccessRequest far WHERE far.dataFile.id=:dataFileId and far.requestState=:requestState"), + @NamedQuery(name = "FileAccessRequest.findByAuthenticatedUserIdAndDataFileIdAndRequestState", + query = "SELECT far FROM FileAccessRequest far WHERE far.user.id=:authenticatedUserId and far.dataFile.id=:dataFileId and far.requestState=:requestState") +}) + +public class FileAccessRequest implements Serializable{ + private static final long serialVersionUID = 1L; + @Id + @GeneratedValue(strategy = GenerationType.IDENTITY) + private Long id; + @ManyToOne - @MapsId("dataFile") - @JoinColumn(name = "datafile_id") + @JoinColumn(nullable=false) private DataFile dataFile; + @ManyToOne - @MapsId("authenticatedUser") - @JoinColumn(name = "authenticated_user_id") - private AuthenticatedUser authenticatedUser; - - @Temporal(value = TemporalType.TIMESTAMP) - @Column(name = "creation_time") - private Date creationTime; - - public FileAccessRequestKey getId() { + @JoinColumn(name="authenticated_user_id",nullable=false) + private AuthenticatedUser user; + + @OneToOne + @JoinColumn(nullable=true) + private GuestbookResponse guestbookResponse; + + public enum RequestState {CREATED,EDITED,GRANTED,REJECTED,RESUBMIT,INVALIDATED,CLOSED}; + //private RequestState state; + @Enumerated(EnumType.STRING) + @Column(name="request_state", nullable=false ) + private RequestState requestState; + + public FileAccessRequest(){ + + } + + public FileAccessRequest(DataFile df, AuthenticatedUser au){ + setDataFile(df); + setRequester(au); + setState(RequestState.CREATED); + } + + public FileAccessRequest(DataFile df, AuthenticatedUser au, GuestbookResponse gbr){ + setDataFile(df); + 
setRequester(au); + setGuestbookResponse(gbr); + setState(RequestState.CREATED); + } + + public Long getId() { return id; } - public void setId(FileAccessRequestKey id) { + public void setId(Long id) { this.id = id; } - - public DataFile getDataFile() { + + public DataFile getDataFile(){ return dataFile; } - - public void setDataFile(DataFile dataFile) { - this.dataFile = dataFile; + + public final void setDataFile(DataFile df){ + this.dataFile = df; + } + + public AuthenticatedUser getRequester(){ + return user; + } + + public final void setRequester(AuthenticatedUser au){ + this.user = au; + } + + public GuestbookResponse getGuestbookResponse(){ + return guestbookResponse; + } + + public final void setGuestbookResponse(GuestbookResponse gbr){ + this.guestbookResponse = gbr; + } + + public RequestState getState() { + return this.requestState; + } + + public void setState(RequestState requestState) { + this.requestState = requestState; + } + + public String getStateLabel() { + if(isStateCreated()){ + return "created"; + } + if(isStateEdited()) { + return "edited"; + } + if(isStateGranted()) { + return "granted"; + } + if(isStateRejected()) { + return "rejected"; + } + if(isStateResubmit()) { + return "resubmit"; + } + if(isStateInvalidated()) { + return "invalidated"; + } + if(isStateClosed()) { + return "closed"; + } + return null; + } + + public void setStateCreated() { + this.requestState = RequestState.CREATED; + } + + public void setStateEdited() { + this.requestState = RequestState.EDITED; + } + + public void setStateGranted() { + this.requestState = RequestState.GRANTED; } - public AuthenticatedUser getAuthenticatedUser() { - return authenticatedUser; + public void setStateRejected() { + this.requestState = RequestState.REJECTED; } - public void setAuthenticatedUser(AuthenticatedUser authenticatedUser) { - this.authenticatedUser = authenticatedUser; + public void setStateResubmit() { + this.requestState = RequestState.RESUBMIT; + } + + public void 
setStateInvalidated() { + this.requestState = RequestState.INVALIDATED; } - public Date getCreationTime() { - return creationTime; + public void setStateClosed() { + this.requestState = RequestState.CLOSED; } - public void setCreationTime(Date creationTime) { - this.creationTime = creationTime; + + public boolean isStateCreated() { + return this.requestState == RequestState.CREATED; + } + + public boolean isStateEdited() { + return this.requestState == RequestState.EDITED; + } + + public boolean isStateGranted() { + return this.requestState == RequestState.GRANTED; } - @Embeddable - public static class FileAccessRequestKey implements Serializable { - @Column(name = "datafile_id") - private Long dataFile; - @Column(name = "authenticated_user_id") - private Long authenticatedUser; + public boolean isStateRejected() { + return this.requestState == RequestState.REJECTED; + } - public Long getDataFile() { - return dataFile; - } + public boolean isStateResubmit() { + return this.requestState == RequestState.RESUBMIT; + } + + public boolean isStateInvalidated() { + return this.requestState == RequestState.INVALIDATED; + } - public void setDataFile(Long dataFile) { - this.dataFile = dataFile; - } + public boolean isStateClosed() { + return this.requestState == RequestState.CLOSED; + } + + @Override + public int hashCode() { + int hash = 0; + hash += (id != null ? 
id.hashCode() : 0); + return hash; + } - public Long getAuthenticatedUser() { - return authenticatedUser; + @Override + public boolean equals(Object object) { + // TODO: Warning - this method won't work in the case the id fields are not set + if (!(object instanceof FileAccessRequest)) { + return false; } - - public void setAuthenticatedUser(Long authenticatedUser) { - this.authenticatedUser = authenticatedUser; + FileAccessRequest other = (FileAccessRequest) object; + if ((this.id == null && other.id != null) || (this.id != null && !this.id.equals(other.id))) { + return false; } + return true; } -} + + +} \ No newline at end of file diff --git a/src/main/java/edu/harvard/iq/dataverse/FileAccessRequestServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/FileAccessRequestServiceBean.java new file mode 100644 index 00000000000..215e4695a75 --- /dev/null +++ b/src/main/java/edu/harvard/iq/dataverse/FileAccessRequestServiceBean.java @@ -0,0 +1,89 @@ +package edu.harvard.iq.dataverse; + +import java.util.List; +import javax.ejb.Stateless; +import javax.inject.Named; +import javax.persistence.EntityManager; +import javax.persistence.PersistenceContext; + +import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser; + +/** + * + * @author Marina + */ +@Stateless +@Named +public class FileAccessRequestServiceBean { + + @PersistenceContext(unitName = "VDCNet-ejbPU") + private EntityManager em; + + public FileAccessRequest find(Object pk) { + return em.find(FileAccessRequest.class, pk); + } + + public List findAll() { + return em.createQuery("select object(o) from FileAccessRequest as o order by o.id", FileAccessRequest.class).getResultList(); + } + + public List findAll(Long authenticatedUserId, Long fileId, FileAccessRequest.RequestState requestState){ + return em.createNamedQuery("FileAccessRequest.findByAuthenticatedUserIdAndDataFileIdAndRequestState", FileAccessRequest.class) + .setParameter("authenticatedUserId",authenticatedUserId) + 
.setParameter("dataFileId",fileId)
+                .setParameter("requestState",requestState)
+                .getResultList();
+    }
+
+    public List findAllByAuthenticedUserId(Long authenticatedUserId){
+        return em.createNamedQuery("FileAccessRequest.findByAuthenticatedUserId", FileAccessRequest.class)
+                .setParameter("authenticatedUserId", authenticatedUserId)
+                .getResultList();
+    }
+
+    public List findAllByGuestbookResponseId(Long guestbookResponseId){
+        return em.createNamedQuery("FileAccessRequest.findByGuestbookResponseId", FileAccessRequest.class)
+                .setParameter("guestbookResponseId", guestbookResponseId)
+                .getResultList();
+
+    }
+
+    public List findAllByDataFileId(Long dataFileId){
+        return em.createNamedQuery("FileAccessRequest.findByDataFileId", FileAccessRequest.class)
+                .setParameter("dataFileId", dataFileId)
+                .getResultList();
+    }
+
+    public List findAllByAuthenticatedUserIdAndRequestState(Long authenticatedUserId, FileAccessRequest.RequestState requestState){
+        return em.createNamedQuery("FileAccessRequest.findByAuthenticatedUserIdAndRequestState", FileAccessRequest.class)
+                .setParameter("authenticatedUserId", authenticatedUserId)
+                .setParameter("requestState",requestState)
+                .getResultList();
+    }
+
+    public List findAllByGuestbookResponseIdAndRequestState(Long guestbookResponseId, FileAccessRequest.RequestState requestState){
+        return em.createNamedQuery("FileAccessRequest.findByGuestbookResponseIdAndRequestState", FileAccessRequest.class)
+                .setParameter("guestbookResponseId", guestbookResponseId)
+                .setParameter("requestState",requestState)
+                .getResultList();
+    }
+
+    public List findAllByDataFileIdAndRequestState(Long dataFileId, FileAccessRequest.RequestState requestState){
+        return em.createNamedQuery("FileAccessRequest.findByDataFileIdAndRequestState", FileAccessRequest.class)
+                .setParameter("dataFileId", dataFileId)
+                .setParameter("requestState",requestState)
+                .getResultList();
+    }
+
+
+    public FileAccessRequest save(FileAccessRequest far) {
+        if (far.getId() == null) {
em.persist(far); + return far; + } else { + return em.merge(far); + } + } + + +} diff --git a/src/main/java/edu/harvard/iq/dataverse/FileDownloadHelper.java b/src/main/java/edu/harvard/iq/dataverse/FileDownloadHelper.java index 850efc2f1ae..e44aeafcc4d 100644 --- a/src/main/java/edu/harvard/iq/dataverse/FileDownloadHelper.java +++ b/src/main/java/edu/harvard/iq/dataverse/FileDownloadHelper.java @@ -71,7 +71,7 @@ private boolean testResponseLength(String value) { // file downloads and multiple (batch) downloads - sice both use the same // terms/etc. popup. public void writeGuestbookAndStartDownload(GuestbookResponse guestbookResponse) { - PrimeFaces.current().executeScript("PF('downloadPopup').hide()"); + PrimeFaces.current().executeScript("PF('guestbookAndTermsPopup').hide()"); guestbookResponse.setDownloadtype("Download"); // Note that this method is only ever called from the file-download-popup - // meaning we know for the fact that we DO want to save this @@ -91,7 +91,7 @@ public void writeGuestbookAndStartDownload(GuestbookResponse guestbookResponse) public void writeGuestbookAndOpenSubset(GuestbookResponse guestbookResponse) { - PrimeFaces.current().executeScript("PF('downloadPopup').hide()"); + PrimeFaces.current().executeScript("PF('guestbookAndTermsPopup').hide()"); PrimeFaces.current().executeScript("PF('downloadDataSubsetPopup').show()"); guestbookResponse.setDownloadtype("Subset"); fileDownloadService.writeGuestbookResponseRecord(guestbookResponse); @@ -132,17 +132,23 @@ public void writeGuestbookAndLaunchExploreTool(GuestbookResponse guestbookRespon fileDownloadService.explore(guestbookResponse, fmd, externalTool); //requestContext.execute("PF('downloadPopup').hide()"); - PrimeFaces.current().executeScript("PF('downloadPopup').hide()"); + PrimeFaces.current().executeScript("PF('guestbookAndTermsPopup').hide()"); } public void writeGuestbookAndLaunchPackagePopup(GuestbookResponse guestbookResponse) { - 
PrimeFaces.current().executeScript("PF('downloadPopup').hide()"); + PrimeFaces.current().executeScript("PF('guestbookAndTermsPopup').hide()"); PrimeFaces.current().executeScript("PF('downloadPackagePopup').show()"); PrimeFaces.current().executeScript("handleResizeDialog('downloadPackagePopup')"); fileDownloadService.writeGuestbookResponseRecord(guestbookResponse); } + public void writeGuestbookResponseAndRequestAccess(GuestbookResponse guestbookResponse) { + //requestContext.execute("PF('guestbookAndTermsPopup').hide()"); + PrimeFaces.current().executeScript("PF('guestbookAndTermsPopup').hide()"); + fileDownloadService.writeGuestbookResponseAndRequestAccess(guestbookResponse); + } + /** * Writes a guestbook entry for either popup scenario: guestbook or terms. */ @@ -307,13 +313,13 @@ public void requestAccessMultiple(List files) { } } if (notificationFile != null && succeeded) { - fileDownloadService.sendRequestFileAccessNotification(notificationFile, (AuthenticatedUser) session.getUser()); + fileDownloadService.sendRequestFileAccessNotification(notificationFile.getOwner(), notificationFile.getId(), (AuthenticatedUser) session.getUser()); } } public void requestAccessIndirect() { //Called when there are multiple files and no popup - // or there's a popup with sigular or multiple files + // or there's a popup with singular or multiple files // The list of files for Request Access is set in the Dataset Page when // user clicks the request access button in the files fragment // (and has selected one or more files) @@ -329,8 +335,16 @@ private boolean processRequestAccess(DataFile file, Boolean sendNotification) { // create notification if necessary if (sendNotification) { - fileDownloadService.sendRequestFileAccessNotification(file, user); - } + fileDownloadService.sendRequestFileAccessNotification(file.getOwner(), file.getId(), (AuthenticatedUser) session.getUser()); + } + //ToDO QDRADA - where to write the response? 
+ /* + //write the guestbookResponse if there is an enabled guestbook + GuestbookResponse gbr = this.getGuestbookResponse(); //can we be sure this is the correct guestbookResponse?? - can it get out of sync?? + if( gbr != null && gbr.getGuestbook().isEnabled() ){ + fileDownloadService.writeGuestbookResponseRecordForRequestAccess(gbr); + } + */ JsfHelper.addSuccessMessage(BundleUtil.getStringFromBundle("file.accessRequested.success")); return true; } diff --git a/src/main/java/edu/harvard/iq/dataverse/FileDownloadServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/FileDownloadServiceBean.java index a90489be29a..f7612300eaf 100644 --- a/src/main/java/edu/harvard/iq/dataverse/FileDownloadServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/FileDownloadServiceBean.java @@ -76,6 +76,8 @@ public class FileDownloadServiceBean implements java.io.Serializable { PrivateUrlServiceBean privateUrlService; @EJB SettingsServiceBean settingsService; + @EJB + MailServiceBean mailService; @Inject DataverseSession session; @@ -192,6 +194,38 @@ public void writeGuestbookAndStartFileDownload(GuestbookResponse guestbookRespon redirectToDownloadAPI(guestbookResponse.getFileFormat(), guestbookResponse.getDataFile().getId()); logger.fine("issued file download redirect for datafile "+guestbookResponse.getDataFile().getId()); } + + public void writeGuestbookResponseAndRequestAccess(GuestbookResponse guestbookResponse){ + if (guestbookResponse == null || ( guestbookResponse.getDataFile() == null && guestbookResponse.getSelectedFileIds() == null) ) { + return; + } + + List selectedDataFiles = new ArrayList<>(); //always make sure it's at least an empty List + + if(guestbookResponse.getDataFile() != null ){ //one file 'selected' by 'Request Access' button click + selectedDataFiles.add(datafileService.find(guestbookResponse.getDataFile().getId())); //don't want the findCheapAndEasy + } + + if(guestbookResponse.getSelectedFileIds() != null && 
!guestbookResponse.getSelectedFileIds().isEmpty()) { //multiple selected through multi-select REquest Access button + selectedDataFiles = datafileService.findAll(guestbookResponse.getSelectedFileIds()); + } + + int countRequestAccessSuccess = 0; + + for(DataFile dataFile : selectedDataFiles){ + guestbookResponse.setDataFile(dataFile); + writeGuestbookResponseRecordForRequestAccess(guestbookResponse); + if(requestAccess(dataFile,guestbookResponse)){ + countRequestAccessSuccess++; + } + } + + if(countRequestAccessSuccess > 0){ + DataFile firstDataFile = selectedDataFiles.get(0); + sendRequestFileAccessNotification(firstDataFile.getOwner(), firstDataFile.getId(), (AuthenticatedUser) session.getUser()); + } + + } public void writeGuestbookResponseRecord(GuestbookResponse guestbookResponse, FileMetadata fileMetadata, String format) { if(!fileMetadata.getDatasetVersion().isDraft()){ @@ -221,6 +255,18 @@ public void writeGuestbookResponseRecord(GuestbookResponse guestbookResponse) { } } + public void writeGuestbookResponseRecordForRequestAccess(GuestbookResponse guestbookResponse) { + try { + CreateGuestbookResponseCommand cmd = new CreateGuestbookResponseCommand(dvRequestService.getDataverseRequest(), guestbookResponse, guestbookResponse.getDataset()); + commandEngine.submit(cmd); + + } catch (CommandException e) { + //if an error occurs here then download won't happen no need for response recs... 
+ logger.info("Failed to writeGuestbookResponseRecord for RequestAccess"); + } + + } + // The "guestBookRecord(s)AlreadyWritten" parameter in the 2 methods // below (redirectToBatchDownloadAPI() and redirectToDownloadAPI(), for the // multiple- and single-file downloads respectively) are passed to the @@ -499,15 +545,63 @@ public boolean requestAccess(Long fileId) { } } return false; - } + } + + public boolean requestAccess(DataFile dataFile, GuestbookResponse gbr){ + boolean accessRequested = false; + if (dvRequestService.getDataverseRequest().getAuthenticatedUser() == null){ + return accessRequested; + } + + List fARs = dataFile.getFileAccessRequesters(); + + if(fARs.isEmpty() || (!fARs.isEmpty() && !fARs.contains((AuthenticatedUser)session.getUser()))){ + try { + commandEngine.submit(new RequestAccessCommand(dvRequestService.getDataverseRequest(), dataFile, gbr)); + accessRequested = true; + } catch (CommandException ex) { + logger.info("Unable to request access for file id " + dataFile.getId() + ". 
Exception: " + ex);
+            }
+        }
+
+        return accessRequested;
+    }
 
-    public void sendRequestFileAccessNotification(DataFile datafile, AuthenticatedUser requestor) {
-        permissionService.getUsersWithPermissionOn(Permission.ManageFilePermissions, datafile).stream().forEach((au) -> {
-            userNotificationService.sendNotification(au, new Timestamp(new Date().getTime()), UserNotification.Type.REQUESTFILEACCESS, datafile.getId(), null, requestor, false);
+    public void sendRequestFileAccessNotification(Dataset dataset, Long fileId, AuthenticatedUser requestor) {
+        permissionService.getUsersWithPermissionOn(Permission.ManageDatasetPermissions, dataset).stream().forEach((au) -> {
+            userNotificationService.sendNotification(au, new Timestamp(new Date().getTime()), UserNotification.Type.REQUESTFILEACCESS, fileId, null, requestor, false);
         });
     }
 
+    public void sendRequestFileAccessNotification(Dataset dataset, Long fileId, GuestbookResponse gb){
+        Timestamp ts = new Timestamp(new Date().getTime());
+        UserNotification un = null;
+
+        //String appendMsgText = (gb == null)?("") : this.getGuestbookAppendEmailDetails(gb);
+        String appendMsgText = "";
+
+        //first send a notification for all the Users that have ManageDatasetPermissions a notification that a user has requested access
+        List mngDsPermUsers = permissionService.getUsersWithPermissionOn(Permission.ManageDatasetPermissions, dataset);
+
+        for (AuthenticatedUser au : mngDsPermUsers){
+            un = userNotificationService.sendUserNotification(au, ts, UserNotification.Type.REQUESTFILEACCESS, fileId);
+
+            if(un != null){
+
+                boolean mailed = mailService.sendNotificationEmail(un, appendMsgText, (AuthenticatedUser)session.getUser(),false);
+                if(mailed){
+                    un.setEmailed(true);
+                    userNotificationService.save(un);
+                }
+            }
+        }
+
+        //send the user that requested access a notification that they requested the access
+        userNotificationService.sendNotification((AuthenticatedUser) session.getUser(), ts, UserNotification.Type.REQUESTEDFILEACCESS, fileId);
+    }
+ public String generateServiceKey() { UUID uid = UUID.randomUUID(); // last 8 bytes, of the random UUID, 16 hex digits: diff --git a/src/main/java/edu/harvard/iq/dataverse/FilePage.java b/src/main/java/edu/harvard/iq/dataverse/FilePage.java index e6d5cc75ca3..5845d65889e 100644 --- a/src/main/java/edu/harvard/iq/dataverse/FilePage.java +++ b/src/main/java/edu/harvard/iq/dataverse/FilePage.java @@ -318,6 +318,12 @@ public boolean isRequestAccessPopupRequired() { return FileUtil.isRequestAccessPopupRequired(fileMetadata.getDatasetVersion()); } + public boolean isGuestbookAndTermsPopupRequired() { + if(fileMetadata.getId() == null || fileMetadata.getDatasetVersion().getId() == null ){ + return false; + } + return FileUtil.isGuestbookAndTermsPopupRequired(fileMetadata.getDatasetVersion()); + } public void setFileMetadata(FileMetadata fileMetadata) { this.fileMetadata = fileMetadata; diff --git a/src/main/java/edu/harvard/iq/dataverse/GuestbookResponse.java b/src/main/java/edu/harvard/iq/dataverse/GuestbookResponse.java index 69404482fce..5c39d1039d9 100644 --- a/src/main/java/edu/harvard/iq/dataverse/GuestbookResponse.java +++ b/src/main/java/edu/harvard/iq/dataverse/GuestbookResponse.java @@ -65,8 +65,12 @@ public class GuestbookResponse implements Serializable { @JoinColumn(nullable=true) private AuthenticatedUser authenticatedUser; - @OneToOne(cascade=CascadeType.ALL,mappedBy="guestbookResponse",fetch = FetchType.LAZY, optional = false) + @OneToOne(cascade={CascadeType.REMOVE, CascadeType.MERGE, CascadeType.PERSIST},mappedBy="guestbookResponse",fetch = FetchType.LAZY) private FileDownload fileDownload; + + @OneToMany(mappedBy="guestbookResponse",cascade={CascadeType.REMOVE, CascadeType.MERGE, CascadeType.PERSIST},fetch = FetchType.LAZY) + //private FileAccessRequest fileAccessRequest; + private List fileAccessRequests; @OneToMany(mappedBy="guestbookResponse",cascade={CascadeType.REMOVE, CascadeType.MERGE, CascadeType.PERSIST},orphanRemoval=true) @OrderBy ("id") @@ 
-253,6 +257,13 @@ public void setFileDownload(FileDownload fDownload){ this.fileDownload = fDownload; } + public List getFileAccessRequests(){ + return fileAccessRequests; + } + + public void setFileAccessRequest(List fARs){ + this.fileAccessRequests = fARs; + } public Dataset getDataset() { return dataset; diff --git a/src/main/java/edu/harvard/iq/dataverse/ManagePermissionsPage.java b/src/main/java/edu/harvard/iq/dataverse/ManagePermissionsPage.java index e71e04bc42f..173af4c241f 100644 --- a/src/main/java/edu/harvard/iq/dataverse/ManagePermissionsPage.java +++ b/src/main/java/edu/harvard/iq/dataverse/ManagePermissionsPage.java @@ -56,6 +56,8 @@ public class ManagePermissionsPage implements java.io.Serializable { @EJB DvObjectServiceBean dvObjectService; @EJB + FileAccessRequestServiceBean fileAccessRequestService; + @EJB DataverseRoleServiceBean roleService; @EJB RoleAssigneeServiceBean roleAssigneeService; @@ -400,6 +402,51 @@ public List completeRoleAssignee( String query ) { return roleAssigneeService.filterRoleAssignees(query, dvObject, roleAssignSelectedRoleAssignees); } + public void grantAccess(ActionEvent evt) { + //QDRADA + logger.info("grantAccess Called"); + try { + throw new Exception("grantAccessCalled"); + } catch (Exception e) { + e.printStackTrace(); + } + /* + // Find the built in file downloader role (currently by alias) + DataverseRole fileDownloaderRole = roleService.findBuiltinRoleByAlias(DataverseRole.FILE_DOWNLOADER); + for (RoleAssignee roleAssignee : selectedRoleAssignees) { + boolean sendNotification = false; + for (DataFile file : selectedFiles) { + if (assignRole(roleAssignee, file, fileDownloaderRole)) { + if (file.isReleased()) { + sendNotification = true; + } + // remove request, if it exist + for (AuthenticatedUser au : roleAssigneeService.getExplicitUsers(roleAssignee)) { + if (file.getFileAccessRequesters().remove(au)) { + List fileAccessRequests = fileAccessRequestService.findAllByAuthenticatedUserIdAndRequestState(au.getId(), 
FileAccessRequest.RequestState.CREATED); + for(FileAccessRequest far : fileAccessRequests){ + far.setStateGranted(); + fileAccessRequestService.save(far); + } + file.setFileAccessRequests(fileAccessRequests); + datafileService.save(file); + } + } + } + + } + + if (sendNotification) { + for (AuthenticatedUser au : roleAssigneeService.getExplicitUsers(roleAssignee)) { + userNotificationService.sendNotification(au, new Timestamp(new Date().getTime()), UserNotification.Type.GRANTFILEACCESS, dataset.getId()); + } + } + } + + initMaps(); + */ + } + public List getAvailableRoles() { List roles = new LinkedList<>(); if (dvObject != null && dvObject.getId() != null) { diff --git a/src/main/java/edu/harvard/iq/dataverse/UserNotification.java b/src/main/java/edu/harvard/iq/dataverse/UserNotification.java index b68a1b9d13e..c91f7630caa 100644 --- a/src/main/java/edu/harvard/iq/dataverse/UserNotification.java +++ b/src/main/java/edu/harvard/iq/dataverse/UserNotification.java @@ -39,7 +39,7 @@ public enum Type { CHECKSUMIMPORT, CHECKSUMFAIL, CONFIRMEMAIL, APIGENERATED, INGESTCOMPLETED, INGESTCOMPLETEDWITHERRORS, PUBLISHFAILED_PIDREG, WORKFLOW_SUCCESS, WORKFLOW_FAILURE, STATUSUPDATED, DATASETCREATED, DATASETMENTIONED, GLOBUSUPLOADCOMPLETED, GLOBUSUPLOADCOMPLETEDWITHERRORS, - GLOBUSDOWNLOADCOMPLETED, GLOBUSDOWNLOADCOMPLETEDWITHERRORS; + GLOBUSDOWNLOADCOMPLETED, GLOBUSDOWNLOADCOMPLETEDWITHERRORS, REQUESTEDFILEACCESS; public String getDescription() { return BundleUtil.getStringFromBundle("notification.typeDescription." 
+ this.name()); diff --git a/src/main/java/edu/harvard/iq/dataverse/UserNotificationServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/UserNotificationServiceBean.java index 947ee3ce989..972f26f6830 100644 --- a/src/main/java/edu/harvard/iq/dataverse/UserNotificationServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/UserNotificationServiceBean.java @@ -131,6 +131,28 @@ public void sendNotification(AuthenticatedUser dataverseUser, Timestamp sendDate save(userNotification); } } + + /** + * Returns a UserNotification that was sent to a dataverseUser. + * Sends ONLY the UserNotification (no email is sent via this method). + * All parameters are assumed to be valid, non-null objects. + * + * @param dataverseUser - the AuthenticatedUser to whom the notification is to be sent + * @param sendDate - the time and date the notification was sent. + * @param type - the type of notification to be sent (see UserNotification for the different types) + * @param objectId - the ID of the Dataverse object (Dataverse, Dataset, etc.) 
that the notification pertains to + * @return The UserNotification that was sent to the dataverseUser + */ + + public UserNotification sendUserNotification(AuthenticatedUser dataverseUser, Timestamp sendDate, Type type, Long objectId) { + UserNotification userNotification = new UserNotification(); + userNotification.setUser(dataverseUser); + userNotification.setSendDate(sendDate); + userNotification.setType(type); + userNotification.setObjectId(objectId); + this.save(userNotification); + return userNotification; + } public boolean isEmailMuted(UserNotification userNotification) { final Type type = userNotification.getType(); diff --git a/src/main/java/edu/harvard/iq/dataverse/authorization/users/AuthenticatedUser.java b/src/main/java/edu/harvard/iq/dataverse/authorization/users/AuthenticatedUser.java index 9fdfce2f1a7..bb688fb8acb 100644 --- a/src/main/java/edu/harvard/iq/dataverse/authorization/users/AuthenticatedUser.java +++ b/src/main/java/edu/harvard/iq/dataverse/authorization/users/AuthenticatedUser.java @@ -1,7 +1,9 @@ package edu.harvard.iq.dataverse.authorization.users; import edu.harvard.iq.dataverse.Cart; +import edu.harvard.iq.dataverse.DataFile; import edu.harvard.iq.dataverse.DatasetLock; +import edu.harvard.iq.dataverse.FileAccessRequest; import edu.harvard.iq.dataverse.UserNotification.Type; import edu.harvard.iq.dataverse.UserNotification; import edu.harvard.iq.dataverse.validation.ValidateEmail; @@ -17,6 +19,7 @@ import edu.harvard.iq.dataverse.util.json.NullSafeJsonBuilder; import java.io.Serializable; import java.sql.Timestamp; +import java.util.ArrayList; import java.util.Date; import java.util.HashSet; import java.util.List; @@ -28,6 +31,7 @@ import javax.persistence.CascadeType; import javax.persistence.Column; import javax.persistence.Entity; +import javax.persistence.FetchType; import javax.persistence.GeneratedValue; import javax.persistence.GenerationType; import javax.persistence.Id; @@ -202,6 +206,27 @@ public void setDatasetLocks(List 
datasetLocks) { @OneToMany(mappedBy = "user", cascade={CascadeType.REMOVE, CascadeType.MERGE, CascadeType.PERSIST}) private List oAuth2TokenDatas; + /*for many to many fileAccessRequests*/ + @OneToMany(mappedBy = "user", cascade={CascadeType.REMOVE, CascadeType.MERGE, CascadeType.PERSIST, CascadeType.REFRESH}, fetch = FetchType.LAZY) + private List fileAccessRequests; + + public List getFileAccessRequests() { + return fileAccessRequests; + } + + public void setFileAccessRequests(List fARs) { + this.fileAccessRequests = fARs; + } + + public List getRequestedDataFiles(){ + List requestedDataFiles = new ArrayList<>(); + + for(FileAccessRequest far : getFileAccessRequests()){ + requestedDataFiles.add(far.getDataFile()); + } + return requestedDataFiles; + } + @Override public AuthenticatedUserDisplayInfo getDisplayInfo() { return new AuthenticatedUserDisplayInfo(firstName, lastName, email, affiliation, position); diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/RequestAccessCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/RequestAccessCommand.java index d710ed66551..f6a3b287778 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/RequestAccessCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/RequestAccessCommand.java @@ -5,7 +5,11 @@ */ package edu.harvard.iq.dataverse.engine.command.impl; +import java.util.logging.Logger; + import edu.harvard.iq.dataverse.DataFile; +import edu.harvard.iq.dataverse.FileAccessRequest; +import edu.harvard.iq.dataverse.GuestbookResponse; import edu.harvard.iq.dataverse.authorization.Permission; import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser; import edu.harvard.iq.dataverse.engine.command.AbstractCommand; @@ -23,8 +27,11 @@ @RequiredPermissions({}) public class RequestAccessCommand extends AbstractCommand { + private static final Logger logger = Logger.getLogger(RequestAccessCommand.class.getName()); + private final DataFile 
file; private final AuthenticatedUser requester; + private final FileAccessRequest fileAccessRequest; private final Boolean sendNotification; @@ -33,6 +40,7 @@ public RequestAccessCommand(DataverseRequest dvRequest, DataFile file) { super(dvRequest, file); this.file = file; this.requester = (AuthenticatedUser) dvRequest.getUser(); + this.fileAccessRequest = new FileAccessRequest(file,requester); this.sendNotification = false; } @@ -41,8 +49,27 @@ public RequestAccessCommand(DataverseRequest dvRequest, DataFile file, Boolean s super(dvRequest, file); this.file = file; this.requester = (AuthenticatedUser) dvRequest.getUser(); + this.fileAccessRequest = new FileAccessRequest(file,requester); this.sendNotification = sendNotification; } + + public RequestAccessCommand(DataverseRequest dvRequest, DataFile file, GuestbookResponse gbr) { + // for data file check permission on owning dataset + super(dvRequest, file); + this.file = file; + this.requester = (AuthenticatedUser) dvRequest.getUser(); + this.fileAccessRequest = new FileAccessRequest(file,requester,gbr); + this.sendNotification = false; + } + + public RequestAccessCommand(DataverseRequest dvRequest, DataFile file, GuestbookResponse gbr, Boolean sendNotification) { + // for data file check permission on owning dataset + super(dvRequest, file); + this.file = file; + this.requester = (AuthenticatedUser) dvRequest.getUser(); + this.fileAccessRequest = new FileAccessRequest(file,requester,gbr); + this.sendNotification = sendNotification; + } public RequestAccessCommand(DataverseRequest dvRequest, DataFile file, GuestbookResponse gbr) { @@ -75,9 +102,13 @@ public DataFile execute(CommandContext ctxt) throws CommandException { if(FileUtil.isActivelyEmbargoed(file)) { throw new CommandException(BundleUtil.getStringFromBundle("file.requestAccess.notAllowed.embargoed"), this); } + file.getFileAccessRequests().add(fileAccessRequest); file.addFileAccessRequester(requester); + 
requester.getFileAccessRequests().add(fileAccessRequest); if (sendNotification) { - ctxt.fileDownload().sendRequestFileAccessNotification(this.file, requester); + //QDRADA + logger.info("ctxt.fileDownload().sendRequestFileAccessNotification(this.file, requester);"); + //ctxt.fileDownload().sendRequestFileAccessNotification(this.file, requester); } return ctxt.files().save(file); } diff --git a/src/main/java/edu/harvard/iq/dataverse/util/FileUtil.java b/src/main/java/edu/harvard/iq/dataverse/util/FileUtil.java index 6bb7e1d583b..a5fb98f7c49 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/FileUtil.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/FileUtil.java @@ -28,6 +28,7 @@ import edu.harvard.iq.dataverse.DatasetVersion; import edu.harvard.iq.dataverse.Embargo; import edu.harvard.iq.dataverse.FileMetadata; +import edu.harvard.iq.dataverse.TermsOfUseAndAccess; import edu.harvard.iq.dataverse.dataaccess.DataAccess; import edu.harvard.iq.dataverse.dataaccess.ImageThumbConverter; import edu.harvard.iq.dataverse.dataaccess.S3AccessIO; @@ -1639,6 +1640,71 @@ private static Boolean popupDueToStateOrTerms(DatasetVersion datasetVersion) { return null; } + /** + * isGuestbookAndTermsPopupRequired + * meant to replace both isDownloadPopupRequired() and isRequestAccessDownloadPopupRequired() when the guestbook-terms-popup-fragment.xhtml + * replaced file-download-popup-fragment.xhtml and file-request-access-popup-fragment.xhtml + * @param datasetVersion + * @return boolean + */ + + public static boolean isGuestbookAndTermsPopupRequired(DatasetVersion datasetVersion) { + return isGuestbookPopupRequired(datasetVersion) || isTermsPopupRequired(datasetVersion); + } + + public static boolean isGuestbookPopupRequired(DatasetVersion datasetVersion) { + + if (datasetVersion == null) { + logger.fine("GuestbookPopup not required because datasetVersion is null."); + return false; + } + //0. 
if version is draft then Popup "not required" + if (!datasetVersion.isReleased()) { + logger.fine("GuestbookPopup not required because datasetVersion has not been released."); + return false; + } + + // 3. Guest Book: + if (datasetVersion.getDataset() != null && datasetVersion.getDataset().getGuestbook() != null && datasetVersion.getDataset().getGuestbook().isEnabled() && datasetVersion.getDataset().getGuestbook().getDataverse() != null) { + logger.fine("GuestbookPopup required because an enabled guestbook exists."); + return true; + } + + logger.fine("GuestbookPopup is not required."); + return false; + } + + public static boolean isTermsPopupRequired(DatasetVersion datasetVersion) { + + if (datasetVersion == null) { + logger.fine("TermsPopup not required because datasetVersion is null."); + return false; + } + //0. if version is draft then Popup "not required" + if (!datasetVersion.isReleased()) { + logger.fine("TermsPopup not required because datasetVersion has not been released."); + return false; + } + // 1. License and Terms of Use: + if (datasetVersion.getTermsOfUseAndAccess() != null) { + if (!License.CC0.equals(datasetVersion.getTermsOfUseAndAccess().getLicense()) + && !(datasetVersion.getTermsOfUseAndAccess().getTermsOfUse() == null + || datasetVersion.getTermsOfUseAndAccess().getTermsOfUse().equals(""))) { + logger.fine("TermsPopup required because of license or terms of use."); + return true; + } + + // 2. Terms of Access: + if (!(datasetVersion.getTermsOfUseAndAccess().getTermsOfAccess() == null) && !datasetVersion.getTermsOfUseAndAccess().getTermsOfAccess().equals("")) { + logger.fine("TermsPopup required because of terms of access."); + return true; + } + } + + logger.fine("TermsPopup is not required."); + return false; + } + /** * Provide download URL if no Terms of Use, no guestbook, and not * restricted. 
diff --git a/src/main/java/edu/harvard/iq/dataverse/util/MailUtil.java b/src/main/java/edu/harvard/iq/dataverse/util/MailUtil.java index 72980c3451a..d166cc753cc 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/MailUtil.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/MailUtil.java @@ -51,6 +51,8 @@ public static String getSubjectTextBasedOnNotification(UserNotification userNoti return BundleUtil.getStringFromBundle("notification.email.create.dataverse.subject", rootDvNameAsList); case REQUESTFILEACCESS: return BundleUtil.getStringFromBundle("notification.email.request.file.access.subject", rootDvNameAsList); + case REQUESTEDFILEACCESS: + return BundleUtil.getStringFromBundle("notification.email.requested.file.access.subject", rootDvNameAsList); case GRANTFILEACCESS: return BundleUtil.getStringFromBundle("notification.email.grant.file.access.subject", rootDvNameAsList); case REJECTFILEACCESS: diff --git a/src/main/webapp/dataset.xhtml b/src/main/webapp/dataset.xhtml index 4ba6ad0e7e1..a79e421fa58 100644 --- a/src/main/webapp/dataset.xhtml +++ b/src/main/webapp/dataset.xhtml @@ -1048,11 +1048,11 @@

#{bundle['dataset.downloadUnrestricted']}

+ rendered="#{DatasetPage.guestbookAndTermsPopupRequired and !settingsWrapper.rsyncDownload}" + oncomplete="PF('guestbookAndTermsPopup').show();" /> @@ -1494,12 +1494,12 @@
- + - + - + @@ -1530,19 +1530,11 @@ - + - - - - - - - -
@@ -1883,7 +1875,7 @@ PF('downloadInvalid').show(); } if (outcome ==='GuestbookRequired'){ - PF('downloadPopup').show(); + PF('guestbookAndTermsPopup').show(); } } diff --git a/src/main/webapp/file-download-button-fragment.xhtml b/src/main/webapp/file-download-button-fragment.xhtml index 597d9a12786..4b075eb3377 100644 --- a/src/main/webapp/file-download-button-fragment.xhtml +++ b/src/main/webapp/file-download-button-fragment.xhtml @@ -60,7 +60,7 @@
  • - #{bundle['file.globus.of']} #{fileMetadata.dataFile.friendlyType == 'Unknown' ? bundle['file.download.filetype.unknown'] : fileMetadata.dataFile.friendlyType} - + update="@widgetVar(guestbookAndTermsPopup)" oncomplete="PF('guestbookAndTermsPopup').show();handleResizeDialog('guestbookAndTermsPopup');"> + GT: #{fileMetadata.dataFile.friendlyType == 'Unknown' ? bundle['file.download.filetype.unknown'] : fileMetadata.dataFile.friendlyType} @@ -85,7 +86,7 @@
  • - #{fileMetadata.dataFile.friendlyType == 'Unknown' ? bundle['file.download.filetype.unknown'] : fileMetadata.dataFile.friendlyType} - + update="@widgetVar(guestbookAndTermsPopup)" oncomplete="PF('guestbookAndTermsPopup').show();handleResizeDialog('guestbookAndTermsPopup');"> + #{fileMetadata.dataFile.friendlyType == 'Unknown' ? bundle['file.download.filetype.unknown'] : fileMetadata.dataFile.friendlyType} - #{fileMetadata.dataFile.friendlyType == 'Unknown' ? bundle['file.download.filetype.unknown'] : fileMetadata.dataFile.friendlyType} - + update="@widgetVar(guestbookAndTermsPopup)" oncomplete="PF('guestbookAndTermsPopup').show();handleResizeDialog('guestbookAndTermsPopup');"> #{fileMetadata.dataFile.friendlyType == 'Unknown' ? bundle['file.download.filetype.unknown'] : fileMetadata.dataFile.friendlyType} @@ -134,23 +136,24 @@
  • - #{bundle['file.downloadBtn.format.all']} - + update="@widgetVar(guestbookAndTermsPopup)" + oncomplete="PF('guestbookAndTermsPopup').show();handleResizeDialog('guestbookAndTermsPopup');"> + #{bundle['file.downloadBtn.format.all']}
  • - @@ -158,12 +161,13 @@ - + update="@widgetVar(guestbookAndTermsPopup)" + oncomplete="PF('guestbookAndTermsPopup').show();handleResizeDialog('guestbookAndTermsPopup');"> + @@ -171,35 +175,37 @@
  • - #{bundle['file.downloadBtn.format.tab']} - + update="@widgetVar(guestbookAndTermsPopup)" + oncomplete="PF('guestbookAndTermsPopup').show();handleResizeDialog('guestbookAndTermsPopup');"> + #{bundle['file.downloadBtn.format.tab']}
  • - #{bundle['file.downloadBtn.format.rdata']} - + update="@widgetVar(guestbookAndTermsPopup)" + oncomplete="PF('guestbookAndTermsPopup').show();handleResizeDialog('guestbookAndTermsPopup');"> + #{bundle['file.downloadBtn.format.rdata']} @@ -215,18 +221,19 @@
  • - #{bundle['file.downloadBtn.format.var']} - + update="@widgetVar(guestbookAndTermsPopup)" + oncomplete="PF('guestbookAndTermsPopup').show();handleResizeDialog('guestbookAndTermsPopup');"> + #{bundle['file.downloadBtn.format.var']}
  • @@ -303,20 +310,21 @@
  • - #{tool.getDisplayNameLang()} - + update="@widgetVar(guestbookAndTermsPopup)" + oncomplete="PF('guestbookAndTermsPopup').show();handleResizeDialog('guestbookAndTermsPopup');"> + #{tool.getDisplayNameLang()}
  • diff --git a/src/main/webapp/file.xhtml b/src/main/webapp/file.xhtml index ae8729fdf89..d27536cb892 100644 --- a/src/main/webapp/file.xhtml +++ b/src/main/webapp/file.xhtml @@ -196,7 +196,7 @@ or FilePage.fileMetadata.dataFile.filePackage and systemConfig.HTTPDownload}"> - + @@ -343,11 +343,11 @@ - + - + @@ -356,7 +356,7 @@ - + From bc42df0946371c1af41560b79b5324ed5b565b99 Mon Sep 17 00:00:00 2001 From: qqmyers Date: Fri, 19 May 2023 13:12:26 -0400 Subject: [PATCH 070/396] typo from merge --- src/main/java/edu/harvard/iq/dataverse/DatasetPage.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java b/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java index c0b4fc104ad..a3160a6e48b 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java @@ -3127,7 +3127,7 @@ public void startDownloadSelectedOriginal() { private void startDownload(boolean downloadOriginal){ boolean guestbookRequired = isDownloadPopupRequired(); - boolean validate = validateFilesForDownload(guestbookRequired); + boolean validate = validateFilesForDownload(downloadOriginal); if (validate) { updateGuestbookResponse(guestbookRequired, downloadOriginal); if(!guestbookRequired && !getValidateFilesOutcome().equals("Mixed")){ From 57e984b0e468a55f578b3b21e4787c1a63e4dca9 Mon Sep 17 00:00:00 2001 From: qqmyers Date: Fri, 19 May 2023 13:27:52 -0400 Subject: [PATCH 071/396] fix for #9601 --- .../edu/harvard/iq/dataverse/ingest/IngestServiceBean.java | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/ingest/IngestServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/ingest/IngestServiceBean.java index 7cdfda8d082..5a353453fe8 100644 --- a/src/main/java/edu/harvard/iq/dataverse/ingest/IngestServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/ingest/IngestServiceBean.java @@ -332,9 +332,7 
@@ public List saveAndAddFilesToDataset(DatasetVersion version, } catch (IOException e) { logger.warning("Error getting ingest limit for file: " + dataFile.getIdentifier() + " : " + e.getMessage()); } - if (unattached) { - dataFile.setOwner(null); - } + if (savedSuccess && belowLimit) { // These are all brand new files, so they should all have // one filemetadata total. -- L.A. @@ -388,6 +386,9 @@ public List saveAndAddFilesToDataset(DatasetVersion version, dataFile.setContentType(FileUtil.MIME_TYPE_TSV); } } + if (unattached) { + dataFile.setOwner(null); + } // ... and let's delete the main temp file if it exists: if(tempLocationPath!=null) { try { From 0c76f7b02b4214efa37b5ac7a5d23f308afae5d5 Mon Sep 17 00:00:00 2001 From: qqmyers Date: Fri, 19 May 2023 15:53:09 -0400 Subject: [PATCH 072/396] remove QDR updates --- src/test/java/edu/harvard/iq/dataverse/util/FileUtilTest.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/test/java/edu/harvard/iq/dataverse/util/FileUtilTest.java b/src/test/java/edu/harvard/iq/dataverse/util/FileUtilTest.java index 34176f7fb26..1d481f18cf5 100644 --- a/src/test/java/edu/harvard/iq/dataverse/util/FileUtilTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/util/FileUtilTest.java @@ -138,7 +138,7 @@ public void testIsDownloadPopupRequiredLicenseCC0() { DatasetVersion dsv1 = new DatasetVersion(); dsv1.setVersionState(DatasetVersion.VersionState.RELEASED); TermsOfUseAndAccess termsOfUseAndAccess = new TermsOfUseAndAccess(); - License license = new License("CC0 1.0", "Creative Commons CC0 1.0 Universal Public Domain Dedication.", URI.create("https://creativecommons.org/publicdomain/zero/1.0"), URI.create("https://licensebuttons.net/p/zero/1.0/88x31.png"), true,1L); + License license = new License("CC0", "You can copy, modify, distribute and perform the work, even for commercial purposes, all without asking permission.", URI.create("http://creativecommons.org/publicdomain/zero/1.0"), 
URI.create("/resources/images/cc0.png"), true, 1l); license.setDefault(true); termsOfUseAndAccess.setLicense(license); dsv1.setTermsOfUseAndAccess(termsOfUseAndAccess); From 45d9042a226cc53cb8f8a09902a849889786cc0b Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Fri, 19 May 2023 23:42:32 +0200 Subject: [PATCH 073/396] style(api): fix typos and style issues in BearerTokenAuthMechanism --- .../api/auth/BearerTokenAuthMechanism.java | 13 ++++++++----- 1 file changed, 8 insertions(+), 5 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/api/auth/BearerTokenAuthMechanism.java b/src/main/java/edu/harvard/iq/dataverse/api/auth/BearerTokenAuthMechanism.java index c4b03728179..856670523b1 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/auth/BearerTokenAuthMechanism.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/auth/BearerTokenAuthMechanism.java @@ -23,6 +23,8 @@ public class BearerTokenAuthMechanism implements AuthMechanism { private static final String BEARER_AUTH_SCHEME = "Bearer"; + private static final Logger logger = Logger.getLogger(BearerTokenAuthMechanism.class.getCanonicalName()); + public static final String UNAUTHORIZED_BEARER_TOKEN = "Unauthorized bearer token"; public static final String INVALID_BEARER_TOKEN = "Could not parse bearer token"; public static final String BEARER_TOKEN_DETECTED_NO_OIDC_PROVIDER_CONFIGURED = "Bearer token detected, no OIDC provider configured"; @@ -31,18 +33,19 @@ public class BearerTokenAuthMechanism implements AuthMechanism { protected AuthenticationServiceBean authSvc; @Inject protected UserServiceBean userSvc; - private static final Logger logger = Logger.getLogger(BearerTokenAuthMechanism.class.getCanonicalName()); + @Override public User findUserFromRequest(ContainerRequestContext containerRequestContext) throws WrappedAuthErrorResponse { if (FeatureFlags.API_BEARER_AUTH.enabled()) { Optional bearerToken = getRequestApiKey(containerRequestContext); // No Bearer Token present, hence no user can 
be authenticated - if (!bearerToken.isPresent()) { + if (bearerToken.isEmpty()) { return null; } + // Validate and verify provided Bearer Token, and retrieve UserRecordIdentifier // TODO: Get the identifier from an invalidating cache to avoid lookup bursts of the same token. Tokens in the cache should be removed after some (configurable) time. - UserRecordIdentifier userInfo = verifyOidcBearerTokenAndGetUserIndentifier(bearerToken.get()); + UserRecordIdentifier userInfo = verifyOidcBearerTokenAndGetUserIdentifier(bearerToken.get()); // retrieve Authenticated User from AuthService AuthenticatedUser authUser = authSvc.lookupUser(userInfo); @@ -67,7 +70,7 @@ public User findUserFromRequest(ContainerRequestContext containerRequestContext) * @param token The string containing the encoded JWT * @return */ - private UserRecordIdentifier verifyOidcBearerTokenAndGetUserIndentifier(String token) throws WrappedAuthErrorResponse { + private UserRecordIdentifier verifyOidcBearerTokenAndGetUserIdentifier(String token) throws WrappedAuthErrorResponse { try { BearerAccessToken accessToken = BearerAccessToken.parse(token); // Get list of all authentication providers using Open ID Connect @@ -108,7 +111,7 @@ private UserRecordIdentifier verifyOidcBearerTokenAndGetUserIndentifier(String t * Retrieve the raw, encoded token value from the Authorization Bearer HTTP header as defined in RFC 6750 * @return An {@link Optional} either empty if not present or the raw token from the header */ - private Optional getRequestApiKey(ContainerRequestContext containerRequestContext) { + private Optional getRequestApiKey(ContainerRequestContext containerRequestContext) { String headerParamApiKey = containerRequestContext.getHeaderString(HttpHeaders.AUTHORIZATION); if (headerParamApiKey != null && headerParamApiKey.toLowerCase().startsWith(BEARER_AUTH_SCHEME.toLowerCase() + " ")) { return Optional.of(headerParamApiKey); From bb49ea52f482b1b6466e124ca926453670699b09 Mon Sep 17 00:00:00 2001 From: 
Oliver Bertuch Date: Fri, 19 May 2023 23:47:31 +0200 Subject: [PATCH 074/396] refactor(api,auth): improve OIDCAuthProvider.getUserIdentifier - Reuse more existing code paths to avoid duplication - Make actual use of returning an empty optional - Remove no longer checked exception OAuth2Exception - Improve Javadocs of method - Don't just retrieve and bail out on fail but provide smaller analysis steps with logs - Rename method to be more concise in name selection - Change BearerTokenAuthMechanism accordingly --- .../api/auth/BearerTokenAuthMechanism.java | 5 +- .../oauth2/oidc/OIDCAuthProvider.java | 47 +++++++++++++++---- 2 files changed, 39 insertions(+), 13 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/api/auth/BearerTokenAuthMechanism.java b/src/main/java/edu/harvard/iq/dataverse/api/auth/BearerTokenAuthMechanism.java index 856670523b1..eeabcba9f06 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/auth/BearerTokenAuthMechanism.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/auth/BearerTokenAuthMechanism.java @@ -5,7 +5,6 @@ import edu.harvard.iq.dataverse.UserServiceBean; import edu.harvard.iq.dataverse.authorization.AuthenticationServiceBean; import edu.harvard.iq.dataverse.authorization.UserRecordIdentifier; -import edu.harvard.iq.dataverse.authorization.providers.oauth2.OAuth2Exception; import edu.harvard.iq.dataverse.authorization.providers.oauth2.oidc.OIDCAuthProvider; import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser; import edu.harvard.iq.dataverse.authorization.users.User; @@ -88,12 +87,12 @@ private UserRecordIdentifier verifyOidcBearerTokenAndGetUserIdentifier(String to for (OIDCAuthProvider provider : providers) { try { // The OIDCAuthProvider need to verify a Bearer Token and equip the client means to identify the corresponding AuthenticatedUser. 
- Optional userInfo = provider.getUserIdentifierForValidToken(accessToken); + Optional userInfo = provider.getUserIdentifier(accessToken); if(userInfo.isPresent()) { logger.log(Level.FINE, "Bearer token detected, provider {0} confirmed validity and provided identifier", provider.getId()); return userInfo.get(); } - } catch ( IOException| OAuth2Exception e) { + } catch (IOException e) { logger.log(Level.FINE, "Bearer token detected, provider " + provider.getId() + " indicates an invalid Token, skipping", e); } } diff --git a/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/oidc/OIDCAuthProvider.java b/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/oidc/OIDCAuthProvider.java index 4cf2eeb626a..52362f7abeb 100644 --- a/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/oidc/OIDCAuthProvider.java +++ b/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/oidc/OIDCAuthProvider.java @@ -46,6 +46,7 @@ import java.util.Optional; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ExecutionException; +import java.util.logging.Level; import java.util.logging.Logger; /** @@ -305,16 +306,42 @@ Optional getUserInfo(BearerAccessToken accessToken) throws IOException } /** - * Returns the UserRecordIdentifier corresponding to the given accessToken if valid. - * UserRecordIdentifier (same used as in OAuth2UserRecord), i.e. can be used to find a local UserAccount. - * @param accessToken - * @return Returns the UserRecordIdentifier corresponding to the given accessToken if valid. - * @throws IOException - * @throws OAuth2Exception + * Trades an access token for an {@link UserRecordIdentifier} (if valid). 
+ * + * @apiNote The resulting {@link UserRecordIdentifier} may be used with + * {@link edu.harvard.iq.dataverse.authorization.AuthenticationServiceBean#lookupUser(UserRecordIdentifier)} + * to look up an {@link edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser} from the database. + * @see edu.harvard.iq.dataverse.api.auth.BearerTokenAuthMechanism + * + * @param accessToken The token to use when requesting user information from the provider + * @return Returns an {@link UserRecordIdentifier} for a valid access token or an empty {@link Optional}. + * @throws IOException In case communication with the endpoint fails to succeed for an I/O reason */ - public Optional getUserIdentifierForValidToken(BearerAccessToken accessToken) throws IOException, OAuth2Exception{ - // Request the UserInfoEndpoint to obtain UserInfo, since this endpoint also validate the Token we can reuse the existing code path. - // As an alternative we could use the Introspect Endpoint or assume the Token as some encoded information (i.e. JWT). - return Optional.of(new UserRecordIdentifier( this.getId(), getUserInfo(accessToken).get().getSubject().getValue())); + public Optional getUserIdentifier(BearerAccessToken accessToken) throws IOException { + OAuth2UserRecord userRecord; + try { + // Try to retrieve with given token (throws if invalid token) + Optional userInfo = getUserInfo(accessToken); + + if (userInfo.isPresent()) { + // Take this detour to avoid code duplication and potentially hard to track conversion errors. + userRecord = getUserRecord(userInfo.get()); + } else { + // This should not happen - an error at the provider side will lead to an exception. 
+ logger.log(Level.WARNING, + "User info retrieval from {0} returned empty optional but expected exception for token {1}.", + List.of(getId(), accessToken).toArray() + ); + return Optional.empty(); + } + } catch (OAuth2Exception e) { + logger.log(Level.FINE, + "Could not retrieve user info with token {0} at provider {1}: {2}", + List.of(accessToken, getId(), e.getMessage()).toArray()); + logger.log(Level.FINER, "Retrieval failed, details as follows: ", e); + return Optional.empty(); + } + + return Optional.of(userRecord.getUserRecordIdentifier()); } } From cecb034ad7e2be7c47496e0197a7c591e2be503e Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Sat, 20 May 2023 00:01:38 +0200 Subject: [PATCH 075/396] refactor(api): add TODO in BearerTokenAuthMechanism not to neglect IO exceptions --- .../iq/dataverse/api/auth/BearerTokenAuthMechanism.java | 3 +++ 1 file changed, 3 insertions(+) diff --git a/src/main/java/edu/harvard/iq/dataverse/api/auth/BearerTokenAuthMechanism.java b/src/main/java/edu/harvard/iq/dataverse/api/auth/BearerTokenAuthMechanism.java index eeabcba9f06..e26717e97b1 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/auth/BearerTokenAuthMechanism.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/auth/BearerTokenAuthMechanism.java @@ -93,6 +93,9 @@ private UserRecordIdentifier verifyOidcBearerTokenAndGetUserIdentifier(String to return userInfo.get(); } } catch (IOException e) { + // TODO: Just logging this is not sufficient - if there is an IO error with the one provider + // which would have validated successfully, this is not the users fault. We need to + // take note and refer to that later when occurred. 
logger.log(Level.FINE, "Bearer token detected, provider " + provider.getId() + " indicates an invalid Token, skipping", e); } } From 647315cb52fbdbb45fb33492bdcbd9c829c73a16 Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Sun, 21 May 2023 22:49:47 +0200 Subject: [PATCH 076/396] feat(test): extend JUnit5 test helper JvmSettings - Adding a @JvmSetting to a testclass now applies it before all test methods (will be reset if necessary after all tests ran) - Enable deleting existing settings (which allows to override a class wide setting for example) by setting value to "null" - Introduce settings broker with a first simple local implementation as extension point to set these settings out somewhere (TM). - Broker requires annotation which one to use within a class at class level --- .../util/testing/JvmSettingBroker.java | 43 +++++ .../util/testing/JvmSettingExtension.java | 164 ++++++++++++------ .../util/testing/LocalJvmSettings.java | 39 +++++ 3 files changed, 191 insertions(+), 55 deletions(-) create mode 100644 src/test/java/edu/harvard/iq/dataverse/util/testing/JvmSettingBroker.java create mode 100644 src/test/java/edu/harvard/iq/dataverse/util/testing/LocalJvmSettings.java diff --git a/src/test/java/edu/harvard/iq/dataverse/util/testing/JvmSettingBroker.java b/src/test/java/edu/harvard/iq/dataverse/util/testing/JvmSettingBroker.java new file mode 100644 index 00000000000..1235df89b3e --- /dev/null +++ b/src/test/java/edu/harvard/iq/dataverse/util/testing/JvmSettingBroker.java @@ -0,0 +1,43 @@ +package edu.harvard.iq.dataverse.util.testing; + +import java.io.IOException; + +/** + * Provide an interface to access and manipulate {@link edu.harvard.iq.dataverse.settings.JvmSettings} + * at some place (local, remote, different ways to access, etc.). + * Part of the {@link JvmSettingExtension} extension to allow JUnit5 tests to manipulate these + * settings, enabling to test different code paths and so on. 
+ * @implNote Keep in mind to use methods that do not require restarts or similar to set or delete a setting. + * This must be changeable on the fly, otherwise it will be useless for testing. + * Yes, non-hot-reloadable settings may be a problem. The code should be refactored in these cases. + */ +public interface JvmSettingBroker { + + /** + * Receive the value of a {@link edu.harvard.iq.dataverse.settings.JvmSettings} given as its {@link String} + * representation. The reason for this is that we may have inserted variable names already. + * @param key The JVM setting to receive as key, e.g. "dataverse.fqdn". + * @return The value of the setting if present or null. + * @throws IOException When communication goes sideways. + */ + String getJvmSetting(String key) throws IOException; + + /** + * Set the value of a {@link edu.harvard.iq.dataverse.settings.JvmSettings} (given as its {@link String} + * representation). The reason for this is that we may have inserted variable names already. + * @param key The JVM setting to receive as key, e.g. "dataverse.fqdn". + * @param value The JVM setting's value we want to have it set to. + * @throws IOException When communication goes sideways. + */ + void setJvmSetting(String key, String value) throws IOException; + + /** + * Remove the value of a {@link edu.harvard.iq.dataverse.settings.JvmSettings} (given as its {@link String} + * representation). For some tests, one might want to clear a certain setting again and potentially have it set + * back afterward. The reason for this is that we may have inserted variable names already. + * @param key The JVM setting to receive as key, e.g. "dataverse.fqdn". + * @throws IOException When communication goes sideways. 
+ */ + String deleteJvmSetting(String key) throws IOException; + +} \ No newline at end of file diff --git a/src/test/java/edu/harvard/iq/dataverse/util/testing/JvmSettingExtension.java b/src/test/java/edu/harvard/iq/dataverse/util/testing/JvmSettingExtension.java index 17728e75ffc..2065d7b3ae6 100644 --- a/src/test/java/edu/harvard/iq/dataverse/util/testing/JvmSettingExtension.java +++ b/src/test/java/edu/harvard/iq/dataverse/util/testing/JvmSettingExtension.java @@ -1,81 +1,124 @@ package edu.harvard.iq.dataverse.util.testing; import edu.harvard.iq.dataverse.settings.JvmSettings; +import org.junit.jupiter.api.extension.AfterAllCallback; import org.junit.jupiter.api.extension.AfterTestExecutionCallback; +import org.junit.jupiter.api.extension.BeforeAllCallback; import org.junit.jupiter.api.extension.BeforeTestExecutionCallback; import org.junit.jupiter.api.extension.ExtensionContext; +import org.junit.platform.commons.support.AnnotationSupport; +import org.junit.platform.commons.support.ReflectionSupport; -import java.lang.reflect.InvocationTargetException; import java.lang.reflect.Method; +import java.lang.reflect.Modifier; +import java.util.List; +import java.util.Optional; import static edu.harvard.iq.dataverse.util.testing.JvmSetting.PLACEHOLDER; -public class JvmSettingExtension implements BeforeTestExecutionCallback, AfterTestExecutionCallback { +public class JvmSettingExtension implements BeforeTestExecutionCallback, AfterTestExecutionCallback, BeforeAllCallback, AfterAllCallback { - private ExtensionContext.Store getStore(ExtensionContext context) { - return context.getStore(ExtensionContext.Namespace.create(getClass(), context.getRequiredTestClass(), context.getRequiredTestMethod())); + @Override + public void beforeAll(ExtensionContext extensionContext) throws Exception { + List settings = AnnotationSupport.findRepeatableAnnotations(extensionContext.getTestClass(), JvmSetting.class); + ExtensionContext.Store store = extensionContext.getStore( + 
ExtensionContext.Namespace.create(getClass(), extensionContext.getRequiredTestClass())); + + setSetting(extensionContext.getRequiredTestClass(), settings, getBroker(extensionContext), store); + } + + @Override + public void afterAll(ExtensionContext extensionContext) throws Exception { + List settings = AnnotationSupport.findRepeatableAnnotations(extensionContext.getTestClass(), JvmSetting.class); + ExtensionContext.Store store = extensionContext.getStore( + ExtensionContext.Namespace.create(getClass(), extensionContext.getRequiredTestClass())); + + resetSetting(settings, getBroker(extensionContext), store); } @Override public void beforeTestExecution(ExtensionContext extensionContext) throws Exception { - extensionContext.getTestMethod().ifPresent(method -> { - JvmSetting[] settings = method.getAnnotationsByType(JvmSetting.class); - for (JvmSetting setting : settings) { - // get the setting name (might need var args substitution) - String settingName = getSettingName(setting); - - // get the setting ... - String oldSetting = System.getProperty(settingName); + List settings = AnnotationSupport.findRepeatableAnnotations(extensionContext.getTestMethod(), JvmSetting.class); + ExtensionContext.Store store = extensionContext.getStore( + ExtensionContext.Namespace.create( + getClass(), + extensionContext.getRequiredTestClass(), + extensionContext.getRequiredTestMethod() + )); + + setSetting(extensionContext.getRequiredTestClass(), settings, getBroker(extensionContext), store); + } - // if present - store in context to restore later - if (oldSetting != null) { - getStore(extensionContext).put(settingName, oldSetting); - } - - // set to new value - if (setting.value().equals(PLACEHOLDER) && setting.method().equals(PLACEHOLDER)) { - throw new IllegalArgumentException("You must either provide a value or a method reference " + - "for key JvmSettings." 
+ setting.key()); - } - - // retrieve value from static test class method if no setting given - if (setting.value().equals(PLACEHOLDER)) { - extensionContext.getTestClass().ifPresent(klass -> { - try { - Method valueMethod = klass.getDeclaredMethod(setting.method()); - valueMethod.setAccessible(true); - System.setProperty(settingName, (String)valueMethod.invoke(null)); - } catch (IllegalAccessException | InvocationTargetException | NoSuchMethodException e) { - throw new RuntimeException(e); - } - }); - } else { - System.setProperty(settingName, setting.value()); + @Override + public void afterTestExecution(ExtensionContext extensionContext) throws Exception { + List settings = AnnotationSupport.findRepeatableAnnotations(extensionContext.getTestMethod(), JvmSetting.class); + ExtensionContext.Store store = extensionContext.getStore( + ExtensionContext.Namespace.create( + getClass(), + extensionContext.getRequiredTestClass(), + extensionContext.getRequiredTestMethod() + )); + + resetSetting(settings, getBroker(extensionContext), store); + } + + private void setSetting(Class testClass, List settings, JvmSettingBroker broker, ExtensionContext.Store store) throws Exception { + for (JvmSetting setting : settings) { + // get the setting name (might need var args substitution) + String settingName = getSettingName(setting); + + // get the setting value ... + String oldSetting = broker.getJvmSetting(settingName); + + // if present - store in context to restore later + if (oldSetting != null) { + store.put(settingName, oldSetting); + } + + // set to new value + if (setting.value().equals(PLACEHOLDER) && setting.method().equals(PLACEHOLDER)) { + throw new IllegalArgumentException("You must either provide a value or a method reference " + + "for key JvmSettings" + setting.key()); + } + + String value; + // Retrieve value from static (!) 
test class method if no direct setting given + if (setting.value().equals(PLACEHOLDER)) { + Optional valueMethod = ReflectionSupport.findMethod(testClass, setting.method()); + if (valueMethod.isEmpty() || ! Modifier.isStatic(valueMethod.get().getModifiers())) { + throw new IllegalStateException("Could not find a static method '" + setting.method() + "' in test class"); } + value = (String) ReflectionSupport.invokeMethod(valueMethod.get(), null); + // Set to new value by using the directly given value + } else { + value = setting.value(); } - }); + + // If the retrieved value is null, delete the setting (will be reset after the test), otherwise set. + if (value != null) { + broker.setJvmSetting(settingName, value); + } else if (oldSetting != null) { + broker.deleteJvmSetting(settingName); + } + } } - @Override - public void afterTestExecution(ExtensionContext extensionContext) throws Exception { - extensionContext.getTestMethod().ifPresent(method -> { - JvmSetting[] settings = method.getAnnotationsByType(JvmSetting.class); - for (JvmSetting setting : settings) { - // get the setting name (might need var args substitution) - String settingName = getSettingName(setting); - - // get a stored setting from context - String oldSetting = getStore(extensionContext).remove(settingName, String.class); - - // if present before, restore - if (oldSetting != null) { - System.setProperty(settingName, oldSetting); + private void resetSetting(List settings, JvmSettingBroker broker, ExtensionContext.Store store) throws Exception { + for (JvmSetting setting : settings) { + // get the setting name (might need var args substitution) + String settingName = getSettingName(setting); + + // get a stored setting from context + String oldSetting = store.remove(settingName, String.class); + + // if present before, restore + if (oldSetting != null) { + broker.setJvmSetting(settingName, oldSetting); // if NOT present before, delete - } else { - System.clearProperty(settingName); - } + } else { + 
broker.deleteJvmSetting(settingName); } - }); + } } private String getSettingName(JvmSetting setting) { @@ -95,4 +138,15 @@ private String getSettingName(JvmSetting setting) { return target.getScopedKey(); } + + private JvmSettingBroker getBroker(ExtensionContext extensionContext) throws Exception { + // Is this test class using local system properties, then get a broker for these + if (AnnotationSupport.isAnnotated(extensionContext.getTestClass(), LocalJvmSettings.class)) { + return LocalJvmSettings.localBroker; + // NOTE: this might be extended later with other annotations to support other means of handling the settings + } else { + throw new IllegalStateException("You must provide the @LocalJvmSettings annotation to the test class"); + } + } + } diff --git a/src/test/java/edu/harvard/iq/dataverse/util/testing/LocalJvmSettings.java b/src/test/java/edu/harvard/iq/dataverse/util/testing/LocalJvmSettings.java new file mode 100644 index 00000000000..372fa91f6f6 --- /dev/null +++ b/src/test/java/edu/harvard/iq/dataverse/util/testing/LocalJvmSettings.java @@ -0,0 +1,39 @@ +package edu.harvard.iq.dataverse.util.testing; + +import org.junit.jupiter.api.extension.ExtendWith; + +import java.lang.annotation.ElementType; +import java.lang.annotation.Inherited; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; + +/** + * This annotation expresses that a test class wants to manipulate local + * settings (because the tests run within the same JVM as the code itself). + * This is mostly true for unit tests. 
+ */ +@Retention(RetentionPolicy.RUNTIME) +@Target({ ElementType.TYPE }) +@ExtendWith(JvmSettingExtension.class) +@Inherited +public @interface LocalJvmSettings { + + JvmSettingBroker localBroker = new JvmSettingBroker() { + @Override + public String getJvmSetting(String key) { + return System.getProperty(key); + } + + @Override + public void setJvmSetting(String key, String value) { + System.setProperty(key, value); + } + + @Override + public String deleteJvmSetting(String key) { + return System.clearProperty(key); + } + }; + +} \ No newline at end of file From 645770f0c08c042f934707d88c187de34ebab95b Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Sun, 21 May 2023 22:59:20 +0200 Subject: [PATCH 077/396] refactor(test): make existing test using JvmSetting annotated with @LocalJvmSettings --- .../dataverse/api/auth/BearerTokenAuthMechanismTest.java | 4 +++- .../api/auth/SessionCookieAuthMechanismTest.java | 8 +++++--- .../iq/dataverse/export/SchemaDotOrgExporterTest.java | 2 ++ .../dataverse/externaltools/ExternalToolHandlerTest.java | 2 ++ .../harvard/iq/dataverse/search/IndexServiceBeanTest.java | 2 ++ .../iq/dataverse/search/SolrClientServiceTest.java | 2 ++ .../harvard/iq/dataverse/settings/FeatureFlagsTest.java | 2 ++ .../harvard/iq/dataverse/settings/JvmSettingsTest.java | 2 ++ .../edu/harvard/iq/dataverse/util/SystemConfigTest.java | 2 ++ .../edu/harvard/iq/dataverse/util/UrlTokenUtilTest.java | 2 ++ 10 files changed, 24 insertions(+), 4 deletions(-) diff --git a/src/test/java/edu/harvard/iq/dataverse/api/auth/BearerTokenAuthMechanismTest.java b/src/test/java/edu/harvard/iq/dataverse/api/auth/BearerTokenAuthMechanismTest.java index 0370daa5ea2..b38300df660 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/auth/BearerTokenAuthMechanismTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/auth/BearerTokenAuthMechanismTest.java @@ -12,6 +12,7 @@ import edu.harvard.iq.dataverse.authorization.users.User; import 
edu.harvard.iq.dataverse.settings.JvmSettings; import edu.harvard.iq.dataverse.util.testing.JvmSetting; +import edu.harvard.iq.dataverse.util.testing.LocalJvmSettings; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; import org.mockito.Mockito; @@ -25,7 +26,8 @@ import static edu.harvard.iq.dataverse.api.auth.BearerTokenAuthMechanism.*; import static org.junit.jupiter.api.Assertions.*; -public class BearerTokenAuthMechanismTest { +@LocalJvmSettings +class BearerTokenAuthMechanismTest { private static final String TEST_API_KEY = "test-api-key"; diff --git a/src/test/java/edu/harvard/iq/dataverse/api/auth/SessionCookieAuthMechanismTest.java b/src/test/java/edu/harvard/iq/dataverse/api/auth/SessionCookieAuthMechanismTest.java index 477f8ee377a..74a7d239c05 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/auth/SessionCookieAuthMechanismTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/auth/SessionCookieAuthMechanismTest.java @@ -6,6 +6,7 @@ import edu.harvard.iq.dataverse.authorization.users.User; import edu.harvard.iq.dataverse.settings.JvmSettings; import edu.harvard.iq.dataverse.util.testing.JvmSetting; +import edu.harvard.iq.dataverse.util.testing.LocalJvmSettings; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; import org.mockito.Mockito; @@ -13,7 +14,8 @@ import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertNull; -public class SessionCookieAuthMechanismTest { +@LocalJvmSettings +class SessionCookieAuthMechanismTest { private SessionCookieAuthMechanism sut; @@ -24,7 +26,7 @@ public void setUp() { @Test @JvmSetting(key = JvmSettings.FEATURE_FLAG, value = "false", varArgs = "api-session-auth") - public void testFindUserFromRequest_FeatureFlagDisabled() throws WrappedAuthErrorResponse { + void testFindUserFromRequest_FeatureFlagDisabled() throws WrappedAuthErrorResponse { sut.session = Mockito.mock(DataverseSession.class); User actual = 
sut.findUserFromRequest(new ContainerRequestTestFake()); @@ -34,7 +36,7 @@ public void testFindUserFromRequest_FeatureFlagDisabled() throws WrappedAuthErro @Test @JvmSetting(key = JvmSettings.FEATURE_FLAG, value = "true", varArgs = "api-session-auth") - public void testFindUserFromRequest_FeatureFlagEnabled_UserAuthenticated() throws WrappedAuthErrorResponse { + void testFindUserFromRequest_FeatureFlagEnabled_UserAuthenticated() throws WrappedAuthErrorResponse { DataverseSession dataverseSessionStub = Mockito.mock(DataverseSession.class); User testAuthenticatedUser = new AuthenticatedUser(); Mockito.when(dataverseSessionStub.getUser()).thenReturn(testAuthenticatedUser); diff --git a/src/test/java/edu/harvard/iq/dataverse/export/SchemaDotOrgExporterTest.java b/src/test/java/edu/harvard/iq/dataverse/export/SchemaDotOrgExporterTest.java index e660cf78da2..722b74406d4 100644 --- a/src/test/java/edu/harvard/iq/dataverse/export/SchemaDotOrgExporterTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/export/SchemaDotOrgExporterTest.java @@ -33,6 +33,7 @@ import javax.json.JsonObject; import edu.harvard.iq.dataverse.util.testing.JvmSetting; +import edu.harvard.iq.dataverse.util.testing.LocalJvmSettings; import org.junit.jupiter.api.BeforeAll; import org.junit.jupiter.api.AfterAll; import org.junit.jupiter.api.Test; @@ -44,6 +45,7 @@ /** * For docs see {@link SchemaDotOrgExporter}. 
*/ +@LocalJvmSettings public class SchemaDotOrgExporterTest { private static final Logger logger = Logger.getLogger(SchemaDotOrgExporterTest.class.getCanonicalName()); diff --git a/src/test/java/edu/harvard/iq/dataverse/externaltools/ExternalToolHandlerTest.java b/src/test/java/edu/harvard/iq/dataverse/externaltools/ExternalToolHandlerTest.java index ab3a0263d66..c77d59123e4 100644 --- a/src/test/java/edu/harvard/iq/dataverse/externaltools/ExternalToolHandlerTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/externaltools/ExternalToolHandlerTest.java @@ -10,6 +10,7 @@ import edu.harvard.iq.dataverse.settings.JvmSettings; import edu.harvard.iq.dataverse.util.json.JsonUtil; import edu.harvard.iq.dataverse.util.testing.JvmSetting; +import edu.harvard.iq.dataverse.util.testing.LocalJvmSettings; import org.junit.jupiter.api.Test; import javax.json.Json; @@ -21,6 +22,7 @@ import java.util.ArrayList; import java.util.List; +@LocalJvmSettings public class ExternalToolHandlerTest { // TODO: It would probably be better to split these into individual tests. 
diff --git a/src/test/java/edu/harvard/iq/dataverse/search/IndexServiceBeanTest.java b/src/test/java/edu/harvard/iq/dataverse/search/IndexServiceBeanTest.java index dd3dc2c6c95..ce6005a3d11 100644 --- a/src/test/java/edu/harvard/iq/dataverse/search/IndexServiceBeanTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/search/IndexServiceBeanTest.java @@ -18,6 +18,7 @@ import edu.harvard.iq.dataverse.settings.SettingsServiceBean; import edu.harvard.iq.dataverse.util.SystemConfig; import edu.harvard.iq.dataverse.util.testing.JvmSetting; +import edu.harvard.iq.dataverse.util.testing.LocalJvmSettings; import org.apache.solr.client.solrj.SolrServerException; import org.apache.solr.client.solrj.impl.HttpSolrClient; import org.junit.jupiter.api.BeforeEach; @@ -37,6 +38,7 @@ import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertTrue; +@LocalJvmSettings @ExtendWith(MockitoExtension.class) public class IndexServiceBeanTest { private static final Logger logger = Logger.getLogger(IndexServiceBeanTest.class.getCanonicalName()); diff --git a/src/test/java/edu/harvard/iq/dataverse/search/SolrClientServiceTest.java b/src/test/java/edu/harvard/iq/dataverse/search/SolrClientServiceTest.java index a3b3c8a2080..72eafcd763c 100644 --- a/src/test/java/edu/harvard/iq/dataverse/search/SolrClientServiceTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/search/SolrClientServiceTest.java @@ -4,6 +4,7 @@ import edu.harvard.iq.dataverse.settings.SettingsServiceBean; import edu.harvard.iq.dataverse.util.SystemConfig; import edu.harvard.iq.dataverse.util.testing.JvmSetting; +import edu.harvard.iq.dataverse.util.testing.LocalJvmSettings; import org.apache.solr.client.solrj.impl.HttpSolrClient; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; @@ -14,6 +15,7 @@ import static org.junit.jupiter.api.Assertions.assertEquals; +@LocalJvmSettings @ExtendWith(MockitoExtension.class) class SolrClientServiceTest { diff 
--git a/src/test/java/edu/harvard/iq/dataverse/settings/FeatureFlagsTest.java b/src/test/java/edu/harvard/iq/dataverse/settings/FeatureFlagsTest.java index 1a9fdeaa3da..26f2186695d 100644 --- a/src/test/java/edu/harvard/iq/dataverse/settings/FeatureFlagsTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/settings/FeatureFlagsTest.java @@ -1,10 +1,12 @@ package edu.harvard.iq.dataverse.settings; import edu.harvard.iq.dataverse.util.testing.JvmSetting; +import edu.harvard.iq.dataverse.util.testing.LocalJvmSettings; import org.junit.jupiter.api.Test; import static org.junit.jupiter.api.Assertions.*; +@LocalJvmSettings class FeatureFlagsTest { @Test diff --git a/src/test/java/edu/harvard/iq/dataverse/settings/JvmSettingsTest.java b/src/test/java/edu/harvard/iq/dataverse/settings/JvmSettingsTest.java index 559d00fe0b7..6b03f20fc41 100644 --- a/src/test/java/edu/harvard/iq/dataverse/settings/JvmSettingsTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/settings/JvmSettingsTest.java @@ -1,11 +1,13 @@ package edu.harvard.iq.dataverse.settings; import edu.harvard.iq.dataverse.util.testing.JvmSetting; +import edu.harvard.iq.dataverse.util.testing.LocalJvmSettings; import edu.harvard.iq.dataverse.util.testing.SystemProperty; import org.junit.jupiter.api.Test; import static org.junit.jupiter.api.Assertions.assertEquals; +@LocalJvmSettings class JvmSettingsTest { @Test @JvmSetting(key = JvmSettings.VERSION, value = "foobar") diff --git a/src/test/java/edu/harvard/iq/dataverse/util/SystemConfigTest.java b/src/test/java/edu/harvard/iq/dataverse/util/SystemConfigTest.java index 2806aa3aa9b..82b89bca678 100644 --- a/src/test/java/edu/harvard/iq/dataverse/util/SystemConfigTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/util/SystemConfigTest.java @@ -3,6 +3,7 @@ import edu.harvard.iq.dataverse.settings.JvmSettings; import edu.harvard.iq.dataverse.settings.SettingsServiceBean; import edu.harvard.iq.dataverse.util.testing.JvmSetting; +import 
edu.harvard.iq.dataverse.util.testing.LocalJvmSettings; import org.junit.jupiter.api.Test; import org.junit.jupiter.api.extension.ExtendWith; import org.junit.jupiter.params.ParameterizedTest; @@ -15,6 +16,7 @@ import static org.junit.jupiter.api.Assertions.assertTrue; import static org.mockito.Mockito.doReturn; +@LocalJvmSettings @ExtendWith(MockitoExtension.class) class SystemConfigTest { diff --git a/src/test/java/edu/harvard/iq/dataverse/util/UrlTokenUtilTest.java b/src/test/java/edu/harvard/iq/dataverse/util/UrlTokenUtilTest.java index 8310af8885c..d70a108e7c6 100644 --- a/src/test/java/edu/harvard/iq/dataverse/util/UrlTokenUtilTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/util/UrlTokenUtilTest.java @@ -9,6 +9,7 @@ import edu.harvard.iq.dataverse.authorization.users.ApiToken; import edu.harvard.iq.dataverse.settings.JvmSettings; import edu.harvard.iq.dataverse.util.testing.JvmSetting; +import edu.harvard.iq.dataverse.util.testing.LocalJvmSettings; import org.junit.jupiter.api.Test; import java.util.ArrayList; @@ -16,6 +17,7 @@ import static org.junit.jupiter.api.Assertions.assertEquals; +@LocalJvmSettings class UrlTokenUtilTest { @Test From dd80162cb7f1b6ad5268057cdb7da547c9bf4b62 Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Sun, 21 May 2023 23:01:15 +0200 Subject: [PATCH 078/396] style(test,api,auth): make BearerTokenAuthMechanismTest simpler --- .../auth/BearerTokenAuthMechanismTest.java | 55 +++++-------------- 1 file changed, 15 insertions(+), 40 deletions(-) diff --git a/src/test/java/edu/harvard/iq/dataverse/api/auth/BearerTokenAuthMechanismTest.java b/src/test/java/edu/harvard/iq/dataverse/api/auth/BearerTokenAuthMechanismTest.java index b38300df660..8a57ee4c41c 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/auth/BearerTokenAuthMechanismTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/auth/BearerTokenAuthMechanismTest.java @@ -27,6 +27,7 @@ import static org.junit.jupiter.api.Assertions.*; @LocalJvmSettings 
+@JvmSetting(key = JvmSettings.FEATURE_FLAG, value = "true", varArgs = "api-bearer-auth") class BearerTokenAuthMechanismTest { private static final String TEST_API_KEY = "test-api-key"; @@ -36,14 +37,12 @@ class BearerTokenAuthMechanismTest { @BeforeEach public void setUp() { sut = new BearerTokenAuthMechanism(); - } - - @Test - @JvmSetting(key = JvmSettings.FEATURE_FLAG, value = "true", varArgs = "api-bearer-auth") - public void testFindUserFromRequest_no_token() throws WrappedAuthErrorResponse { sut.authSvc = Mockito.mock(AuthenticationServiceBean.class); sut.userSvc = Mockito.mock(UserServiceBean.class); + } + @Test + void testFindUserFromRequest_no_token() throws WrappedAuthErrorResponse { ContainerRequestContext testContainerRequest = new BearerTokenKeyContainerRequestTestFake(null); User actual = sut.findUserFromRequest(testContainerRequest); @@ -51,11 +50,9 @@ public void testFindUserFromRequest_no_token() throws WrappedAuthErrorResponse { } @Test - @JvmSetting(key = JvmSettings.FEATURE_FLAG, value = "true", varArgs = "api-bearer-auth") - public void testFindUserFromRequest_invalid_token() throws WrappedAuthErrorResponse { - sut.authSvc = Mockito.mock(AuthenticationServiceBean.class); - sut.userSvc = Mockito.mock(UserServiceBean.class); + void testFindUserFromRequest_invalid_token() throws WrappedAuthErrorResponse { Mockito.when(sut.authSvc.getAuthenticationProviderIdsOfType(OIDCAuthProvider.class)).thenReturn(Collections.emptySet()); + ContainerRequestContext testContainerRequest = new BearerTokenKeyContainerRequestTestFake("Bearer "); WrappedAuthErrorResponse wrappedAuthErrorResponse = assertThrows(WrappedAuthErrorResponse.class, () -> sut.findUserFromRequest(testContainerRequest)); @@ -63,11 +60,9 @@ public void testFindUserFromRequest_invalid_token() throws WrappedAuthErrorRespo assertEquals(INVALID_BEARER_TOKEN, wrappedAuthErrorResponse.getMessage()); } @Test - @JvmSetting(key = JvmSettings.FEATURE_FLAG, value = "true", varArgs = "api-bearer-auth") - 
public void testFindUserFromRequest_no_OidcProvider() throws WrappedAuthErrorResponse { - sut.authSvc = Mockito.mock(AuthenticationServiceBean.class); - sut.userSvc = Mockito.mock(UserServiceBean.class); + void testFindUserFromRequest_no_OidcProvider() throws WrappedAuthErrorResponse { Mockito.when(sut.authSvc.getAuthenticationProviderIdsOfType(OIDCAuthProvider.class)).thenReturn(Collections.emptySet()); + ContainerRequestContext testContainerRequest = new BearerTokenKeyContainerRequestTestFake("Bearer " +TEST_API_KEY); WrappedAuthErrorResponse wrappedAuthErrorResponse = assertThrows(WrappedAuthErrorResponse.class, () -> sut.findUserFromRequest(testContainerRequest)); @@ -76,12 +71,7 @@ public void testFindUserFromRequest_no_OidcProvider() throws WrappedAuthErrorRes } @Test - @JvmSetting(key = JvmSettings.FEATURE_FLAG, value = "true", varArgs = "api-bearer-auth") - public void testFindUserFromRequest_oneProvider_invalidToken_1() throws WrappedAuthErrorResponse, ParseException, IOException, OAuth2Exception { - - sut.userSvc = Mockito.mock(UserServiceBean.class); - sut.authSvc = Mockito.mock(AuthenticationServiceBean.class); - + void testFindUserFromRequest_oneProvider_invalidToken_1() throws WrappedAuthErrorResponse, ParseException, IOException, OAuth2Exception { OIDCAuthProvider oidcAuthProvider = Mockito.mock(OIDCAuthProvider.class); String providerID = "OIEDC"; Mockito.when(oidcAuthProvider.getId()).thenReturn(providerID); @@ -91,7 +81,7 @@ public void testFindUserFromRequest_oneProvider_invalidToken_1() throws WrappedA // ensure that the OIDCAuthProvider returns a valid UserRecordIdentifier for a given Token BearerAccessToken token = BearerAccessToken.parse("Bearer " + TEST_API_KEY); - Mockito.when(oidcAuthProvider.getUserIdentifierForValidToken(token)).thenReturn(Optional.empty()); + Mockito.when(oidcAuthProvider.getUserIdentifier(token)).thenReturn(Optional.empty()); // when ContainerRequestContext testContainerRequest = new 
BearerTokenKeyContainerRequestTestFake("Bearer " + TEST_API_KEY); @@ -102,12 +92,7 @@ public void testFindUserFromRequest_oneProvider_invalidToken_1() throws WrappedA } @Test - @JvmSetting(key = JvmSettings.FEATURE_FLAG, value = "true", varArgs = "api-bearer-auth") - public void testFindUserFromRequest_oneProvider_invalidToken_2() throws WrappedAuthErrorResponse, ParseException, IOException, OAuth2Exception { - - sut.userSvc = Mockito.mock(UserServiceBean.class); - sut.authSvc = Mockito.mock(AuthenticationServiceBean.class); - + void testFindUserFromRequest_oneProvider_invalidToken_2() throws WrappedAuthErrorResponse, ParseException, IOException, OAuth2Exception { OIDCAuthProvider oidcAuthProvider = Mockito.mock(OIDCAuthProvider.class); String providerID = "OIEDC"; Mockito.when(oidcAuthProvider.getId()).thenReturn(providerID); @@ -127,12 +112,7 @@ public void testFindUserFromRequest_oneProvider_invalidToken_2() throws WrappedA assertEquals(UNAUTHORIZED_BEARER_TOKEN, wrappedAuthErrorResponse.getMessage()); } @Test - @JvmSetting(key = JvmSettings.FEATURE_FLAG, value = "true", varArgs = "api-bearer-auth") - public void testFindUserFromRequest_oneProvider_validToken() throws WrappedAuthErrorResponse, ParseException, IOException, OAuth2Exception { - - sut.userSvc = Mockito.mock(UserServiceBean.class); - sut.authSvc = Mockito.mock(AuthenticationServiceBean.class); - + void testFindUserFromRequest_oneProvider_validToken() throws WrappedAuthErrorResponse, ParseException, IOException, OAuth2Exception { OIDCAuthProvider oidcAuthProvider = Mockito.mock(OIDCAuthProvider.class); String providerID = "OIEDC"; Mockito.when(oidcAuthProvider.getId()).thenReturn(providerID); @@ -143,7 +123,7 @@ public void testFindUserFromRequest_oneProvider_validToken() throws WrappedAuthE // ensure that the OIDCAuthProvider returns a valid UserRecordIdentifier for a given Token UserRecordIdentifier userinfo = new UserRecordIdentifier(providerID, "KEY"); BearerAccessToken token = 
BearerAccessToken.parse("Bearer " + TEST_API_KEY); - Mockito.when(oidcAuthProvider.getUserIdentifierForValidToken(token)).thenReturn(Optional.of(userinfo)); + Mockito.when(oidcAuthProvider.getUserIdentifier(token)).thenReturn(Optional.of(userinfo)); // ensures that the AuthenticationServiceBean can retrieve an Authenticated user based on the UserRecordIdentifier AuthenticatedUser testAuthenticatedUser = new AuthenticatedUser(); @@ -160,12 +140,7 @@ public void testFindUserFromRequest_oneProvider_validToken() throws WrappedAuthE } @Test - @JvmSetting(key = JvmSettings.FEATURE_FLAG, value = "true", varArgs = "api-bearer-auth") - public void testFindUserFromRequest_oneProvider_validToken_noAccount() throws WrappedAuthErrorResponse, ParseException, IOException, OAuth2Exception { - - sut.userSvc = Mockito.mock(UserServiceBean.class); - sut.authSvc = Mockito.mock(AuthenticationServiceBean.class); - + void testFindUserFromRequest_oneProvider_validToken_noAccount() throws WrappedAuthErrorResponse, ParseException, IOException, OAuth2Exception { OIDCAuthProvider oidcAuthProvider = Mockito.mock(OIDCAuthProvider.class); String providerID = "OIEDC"; Mockito.when(oidcAuthProvider.getId()).thenReturn(providerID); @@ -176,7 +151,7 @@ public void testFindUserFromRequest_oneProvider_validToken_noAccount() throws Wr // ensure that the OIDCAuthProvider returns a valid UserRecordIdentifier for a given Token UserRecordIdentifier userinfo = new UserRecordIdentifier(providerID, "KEY"); BearerAccessToken token = BearerAccessToken.parse("Bearer " + TEST_API_KEY); - Mockito.when(oidcAuthProvider.getUserIdentifierForValidToken(token)).thenReturn(Optional.of(userinfo)); + Mockito.when(oidcAuthProvider.getUserIdentifier(token)).thenReturn(Optional.of(userinfo)); // ensures that the AuthenticationServiceBean can retrieve an Authenticated user based on the UserRecordIdentifier Mockito.when(sut.authSvc.lookupUser(userinfo)).thenReturn(null); From 544a502e3f57b52a2dbaae562e43083e7a015315 Mon Sep 17 
00:00:00 2001 From: Oliver Bertuch Date: Sun, 21 May 2023 23:03:33 +0200 Subject: [PATCH 079/396] refactor(auth,api,test): adapt exception checks in BearerTokenAuthMechanismTest - Now that we no longer need to catch OAuth2Exception, adapt accordingly - This fixed the failing tests --- .../api/auth/BearerTokenAuthMechanismTest.java | 15 +++++++-------- 1 file changed, 7 insertions(+), 8 deletions(-) diff --git a/src/test/java/edu/harvard/iq/dataverse/api/auth/BearerTokenAuthMechanismTest.java b/src/test/java/edu/harvard/iq/dataverse/api/auth/BearerTokenAuthMechanismTest.java index 8a57ee4c41c..281f1d21d45 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/auth/BearerTokenAuthMechanismTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/auth/BearerTokenAuthMechanismTest.java @@ -6,7 +6,6 @@ import edu.harvard.iq.dataverse.api.auth.doubles.BearerTokenKeyContainerRequestTestFake; import edu.harvard.iq.dataverse.authorization.AuthenticationServiceBean; import edu.harvard.iq.dataverse.authorization.UserRecordIdentifier; -import edu.harvard.iq.dataverse.authorization.providers.oauth2.OAuth2Exception; import edu.harvard.iq.dataverse.authorization.providers.oauth2.oidc.OIDCAuthProvider; import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser; import edu.harvard.iq.dataverse.authorization.users.User; @@ -50,7 +49,7 @@ void testFindUserFromRequest_no_token() throws WrappedAuthErrorResponse { } @Test - void testFindUserFromRequest_invalid_token() throws WrappedAuthErrorResponse { + void testFindUserFromRequest_invalid_token() { Mockito.when(sut.authSvc.getAuthenticationProviderIdsOfType(OIDCAuthProvider.class)).thenReturn(Collections.emptySet()); ContainerRequestContext testContainerRequest = new BearerTokenKeyContainerRequestTestFake("Bearer "); @@ -60,7 +59,7 @@ void testFindUserFromRequest_invalid_token() throws WrappedAuthErrorResponse { assertEquals(INVALID_BEARER_TOKEN, wrappedAuthErrorResponse.getMessage()); } @Test - void 
testFindUserFromRequest_no_OidcProvider() throws WrappedAuthErrorResponse { + void testFindUserFromRequest_no_OidcProvider() { Mockito.when(sut.authSvc.getAuthenticationProviderIdsOfType(OIDCAuthProvider.class)).thenReturn(Collections.emptySet()); ContainerRequestContext testContainerRequest = new BearerTokenKeyContainerRequestTestFake("Bearer " +TEST_API_KEY); @@ -71,7 +70,7 @@ void testFindUserFromRequest_no_OidcProvider() throws WrappedAuthErrorResponse { } @Test - void testFindUserFromRequest_oneProvider_invalidToken_1() throws WrappedAuthErrorResponse, ParseException, IOException, OAuth2Exception { + void testFindUserFromRequest_oneProvider_invalidToken_1() throws ParseException, IOException { OIDCAuthProvider oidcAuthProvider = Mockito.mock(OIDCAuthProvider.class); String providerID = "OIEDC"; Mockito.when(oidcAuthProvider.getId()).thenReturn(providerID); @@ -92,7 +91,7 @@ void testFindUserFromRequest_oneProvider_invalidToken_1() throws WrappedAuthErro } @Test - void testFindUserFromRequest_oneProvider_invalidToken_2() throws WrappedAuthErrorResponse, ParseException, IOException, OAuth2Exception { + void testFindUserFromRequest_oneProvider_invalidToken_2() throws ParseException, IOException { OIDCAuthProvider oidcAuthProvider = Mockito.mock(OIDCAuthProvider.class); String providerID = "OIEDC"; Mockito.when(oidcAuthProvider.getId()).thenReturn(providerID); @@ -102,7 +101,7 @@ void testFindUserFromRequest_oneProvider_invalidToken_2() throws WrappedAuthErro // ensure that the OIDCAuthProvider returns a valid UserRecordIdentifier for a given Token BearerAccessToken token = BearerAccessToken.parse("Bearer " + TEST_API_KEY); - Mockito.when(oidcAuthProvider.getUserIdentifierForValidToken(token)).thenThrow(OAuth2Exception.class); + Mockito.when(oidcAuthProvider.getUserIdentifier(token)).thenThrow(IOException.class); // when ContainerRequestContext testContainerRequest = new BearerTokenKeyContainerRequestTestFake("Bearer " + TEST_API_KEY); @@ -112,7 +111,7 @@ void 
testFindUserFromRequest_oneProvider_invalidToken_2() throws WrappedAuthErro assertEquals(UNAUTHORIZED_BEARER_TOKEN, wrappedAuthErrorResponse.getMessage()); } @Test - void testFindUserFromRequest_oneProvider_validToken() throws WrappedAuthErrorResponse, ParseException, IOException, OAuth2Exception { + void testFindUserFromRequest_oneProvider_validToken() throws WrappedAuthErrorResponse, ParseException, IOException { OIDCAuthProvider oidcAuthProvider = Mockito.mock(OIDCAuthProvider.class); String providerID = "OIEDC"; Mockito.when(oidcAuthProvider.getId()).thenReturn(providerID); @@ -140,7 +139,7 @@ void testFindUserFromRequest_oneProvider_validToken() throws WrappedAuthErrorRes } @Test - void testFindUserFromRequest_oneProvider_validToken_noAccount() throws WrappedAuthErrorResponse, ParseException, IOException, OAuth2Exception { + void testFindUserFromRequest_oneProvider_validToken_noAccount() throws WrappedAuthErrorResponse, ParseException, IOException { OIDCAuthProvider oidcAuthProvider = Mockito.mock(OIDCAuthProvider.class); String providerID = "OIEDC"; Mockito.when(oidcAuthProvider.getId()).thenReturn(providerID); From c859ef64adc6cb59065da4f0cf6aa0097a022701 Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Sun, 21 May 2023 23:16:19 +0200 Subject: [PATCH 080/396] doc(test): add changes to JvmSetting helper in testing guide --- doc/sphinx-guides/source/developers/testing.rst | 14 ++++++++++---- 1 file changed, 10 insertions(+), 4 deletions(-) diff --git a/doc/sphinx-guides/source/developers/testing.rst b/doc/sphinx-guides/source/developers/testing.rst index c734bed8b70..5814d9d4e7b 100755 --- a/doc/sphinx-guides/source/developers/testing.rst +++ b/doc/sphinx-guides/source/developers/testing.rst @@ -85,20 +85,26 @@ JUnit 5 Test Helper Extensions Our codebase provides little helpers to ease dealing with state during tests. Some tests might need to change something which should be restored after the test ran. 
-For unit tests, the most interesting part is to set a JVM setting just for the current test. -Please use the ``@JvmSetting(key = JvmSettings.XXX, value = "")`` annotation on a test method or -a test class to set and clear the property automatically. +For unit tests, the most interesting part is to set a JVM setting just for the current test or a whole test class. +(Which might be an inner class, too!) Please make use of the ``@JvmSetting(key = JvmSettings.XXX, value = "")`` +annotation and also make sure to annotate the test class with ``@LocalJvmSettings``. Inspired by JUnit's ``@MethodSource`` annotation, you may use ``@JvmSetting(key = JvmSettings.XXX, method = "zzz")`` -to reference a method located in the same test class by name (i. e. ``private static String zzz() {}``) to allow +to reference a static method located in the same test class by name (i.e. ``private static String zzz() {}``) to allow retrieving dynamic data instead of String constants only. (Note the requirement for a *static* method!) +If you want to delete a setting, simply provide a ``null`` value. This can be used to override a class-wide setting +or some other default that is present for some reason. + To set arbitrary system properties for the current test, a similar extension ``@SystemProperty(key = "", value = "")`` has been added. (Note: it does not support method references.) Both extensions will ensure the global state of system properties is non-interfering for test executions. Tests using these extensions will be executed in serial. +This settings helper may be extended at a later time to manipulate settings in a remote instance during integration +or end-to-end testing. Stay tuned! 
+ Observing Changes to Code Coverage ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ From bda75c18d5b1799e81f17f6711b9323441f559ff Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Mon, 22 May 2023 01:46:52 +0200 Subject: [PATCH 081/396] feat(model): make UserRecordIdentifier comparable Adding equals and hashCode methods to enable testing for equality in tests, etc. --- .../authorization/UserRecordIdentifier.java | 14 ++++++++++++++ 1 file changed, 14 insertions(+) diff --git a/src/main/java/edu/harvard/iq/dataverse/authorization/UserRecordIdentifier.java b/src/main/java/edu/harvard/iq/dataverse/authorization/UserRecordIdentifier.java index 963ee592bbf..dfbb43fae46 100644 --- a/src/main/java/edu/harvard/iq/dataverse/authorization/UserRecordIdentifier.java +++ b/src/main/java/edu/harvard/iq/dataverse/authorization/UserRecordIdentifier.java @@ -2,6 +2,8 @@ import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser; +import java.util.Objects; + /** * Identifies a user using two strings: *
      @@ -38,4 +40,16 @@ public AuthenticatedUserLookup createAuthenticatedUserLookup( AuthenticatedUser return new AuthenticatedUserLookup(userIdInRepo, repoId, u); } + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (!(o instanceof UserRecordIdentifier)) return false; + UserRecordIdentifier that = (UserRecordIdentifier) o; + return Objects.equals(repoId, that.repoId) && Objects.equals(getUserIdInRepo(), that.getUserIdInRepo()); + } + + @Override + public int hashCode() { + return Objects.hash(repoId, getUserIdInRepo()); + } } From 7004191eeec6f2deb5a01d52222fa1da9cea725b Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Mon, 22 May 2023 01:52:38 +0200 Subject: [PATCH 082/396] feat(test,api,auth): create actual integration tests in OIDCAuthenticationProviderFactoryIT - First test makes a roundtrip to receive the user info for the kcuser - Second test simulates an API request with a bearer token --- .../OIDCAuthenticationProviderFactoryIT.java | 123 +++++++++++++++++- 1 file changed, 116 insertions(+), 7 deletions(-) diff --git a/src/test/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/oidc/OIDCAuthenticationProviderFactoryIT.java b/src/test/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/oidc/OIDCAuthenticationProviderFactoryIT.java index 53cfcca2742..a5aa29cc083 100644 --- a/src/test/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/oidc/OIDCAuthenticationProviderFactoryIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/oidc/OIDCAuthenticationProviderFactoryIT.java @@ -1,37 +1,146 @@ package edu.harvard.iq.dataverse.authorization.providers.oauth2.oidc; +import com.nimbusds.oauth2.sdk.token.BearerAccessToken; +import com.nimbusds.openid.connect.sdk.claims.UserInfo; import dasniko.testcontainers.keycloak.KeycloakContainer; +import edu.harvard.iq.dataverse.UserServiceBean; +import edu.harvard.iq.dataverse.api.auth.BearerTokenAuthMechanism; +import 
edu.harvard.iq.dataverse.api.auth.doubles.BearerTokenKeyContainerRequestTestFake; +import edu.harvard.iq.dataverse.authorization.AuthenticationServiceBean; +import edu.harvard.iq.dataverse.authorization.UserRecordIdentifier; +import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser; +import edu.harvard.iq.dataverse.authorization.users.User; +import edu.harvard.iq.dataverse.mocks.MockAuthenticatedUser; import edu.harvard.iq.dataverse.settings.JvmSettings; import edu.harvard.iq.dataverse.util.testing.JvmSetting; +import edu.harvard.iq.dataverse.util.testing.LocalJvmSettings; import org.junit.jupiter.api.Tag; import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.keycloak.admin.client.Keycloak; +import org.keycloak.admin.client.KeycloakBuilder; +import org.mockito.InjectMocks; +import org.mockito.Mock; +import org.mockito.junit.jupiter.MockitoExtension; import org.testcontainers.junit.jupiter.Container; import org.testcontainers.junit.jupiter.Testcontainers; +import java.util.Optional; +import java.util.Set; + +import static edu.harvard.iq.dataverse.authorization.providers.oauth2.oidc.OIDCAuthenticationProviderFactoryIT.clientId; +import static edu.harvard.iq.dataverse.authorization.providers.oauth2.oidc.OIDCAuthenticationProviderFactoryIT.clientSecret; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotNull; import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assumptions.assumeFalse; +import static org.junit.jupiter.api.Assumptions.assumeTrue; +import static org.mockito.Mockito.when; @Tag("testcontainers") @Testcontainers +@ExtendWith(MockitoExtension.class) +// NOTE: order is important here - Testcontainers must be first, otherwise it's not ready when we call getAuthUrl() +@LocalJvmSettings +@JvmSetting(key = JvmSettings.OIDC_CLIENT_ID, value = clientId) +@JvmSetting(key = 
JvmSettings.OIDC_CLIENT_SECRET, value = clientSecret) +@JvmSetting(key = JvmSettings.OIDC_AUTH_SERVER_URL, method = "getAuthUrl") class OIDCAuthenticationProviderFactoryIT { + // NOTE: the following values are taken from the realm import file! static final String clientId = "oidc-client"; static final String clientSecret = "ss6gE8mODCDfqesQaSG3gwUwZqZt547E"; static final String realm = "oidc-realm"; + static final String adminUser = "kcuser"; + static final String adminPassword = "kcpassword"; + static final String clientIdAdminCli = "admin-cli"; + // The realm JSON resides in conf/keycloak/oidc-realm.json and is made available here via the build configuration in pom.xml @Container - static KeycloakContainer keycloakContainer = new KeycloakContainer().withRealmImportFile("keycloak/oidc-realm.json"); + static KeycloakContainer keycloakContainer = new KeycloakContainer("quay.io/keycloak/keycloak:19.0") + .withRealmImportFile("keycloak/oidc-realm.json") + .withAdminUsername(adminUser) + .withAdminPassword(adminPassword); - // simple method to retrieve the issuer URL, referenced to by @JvmSetting annotations + // simple method to retrieve the issuer URL, referenced by @JvmSetting annotations (do not delete) private static String getAuthUrl() { return keycloakContainer.getAuthServerUrl() + "realms/" + realm; } + OIDCAuthProvider getProvider() throws Exception { + OIDCAuthProvider oidcAuthProvider = (OIDCAuthProvider) OIDCAuthenticationProviderFactory.buildFromSettings(); + + assumeTrue(oidcAuthProvider.getMetadata().getTokenEndpointURI().toString() + .startsWith(keycloakContainer.getAuthServerUrl())); + + return oidcAuthProvider; + } + + Keycloak getAdminClient() { + return KeycloakBuilder.builder() + .serverUrl(keycloakContainer.getAuthServerUrl()) + .realm(realm) + .clientId(clientIdAdminCli) + .username(keycloakContainer.getAdminUsername()) + .password(keycloakContainer.getAdminPassword()) + .build(); + } + + String getBearerToken() throws Exception { + Keycloak keycloak = getAdminClient(); + 
return keycloak.tokenManager().getAccessTokenString(); + } + @Test - @JvmSetting(key = JvmSettings.OIDC_CLIENT_ID, value = clientId) - @JvmSetting(key = JvmSettings.OIDC_CLIENT_SECRET, value = clientSecret) - @JvmSetting(key = JvmSettings.OIDC_AUTH_SERVER_URL, method = "getAuthUrl") void testCreateProvider() throws Exception { - OIDCAuthProvider oidcAuthProvider = (OIDCAuthProvider) OIDCAuthenticationProviderFactory.buildFromSettings(); - assertTrue(oidcAuthProvider.getMetadata().getTokenEndpointURI().toString().startsWith(keycloakContainer.getAuthServerUrl())); + OIDCAuthProvider oidcAuthProvider = getProvider(); + String token = getBearerToken(); + assumeFalse(token == null); + + Optional info = oidcAuthProvider.getUserInfo(new BearerAccessToken(token)); + + assertTrue(info.isPresent()); + assertEquals(adminUser, info.get().getPreferredUsername()); + } + + @Mock + UserServiceBean userService; + @Mock + AuthenticationServiceBean authService; + + @InjectMocks + BearerTokenAuthMechanism bearerTokenAuthMechanism; + + @Test + @JvmSetting(key = JvmSettings.FEATURE_FLAG, varArgs = "api-bearer-auth", value = "true") + void testApiBearerAuth() throws Exception { + assumeFalse(userService == null); + assumeFalse(authService == null); + assumeFalse(bearerTokenAuthMechanism == null); + + // given + // Get the access token from the remote Keycloak in the container + String accessToken = getBearerToken(); + assumeFalse(accessToken == null); + + OIDCAuthProvider oidcAuthProvider = getProvider(); + // This will also receive the details from the remote Keycloak in the container + UserRecordIdentifier identifier = oidcAuthProvider.getUserIdentifier(new BearerAccessToken(accessToken)).get(); + String token = "Bearer " + accessToken; + BearerTokenKeyContainerRequestTestFake request = new BearerTokenKeyContainerRequestTestFake(token); + AuthenticatedUser user = new MockAuthenticatedUser(); + + // setup mocks (we don't want or need a database here) + 
when(authService.getAuthenticationProviderIdsOfType(OIDCAuthProvider.class)).thenReturn(Set.of(oidcAuthProvider.getId())); + when(authService.getAuthenticationProvider(oidcAuthProvider.getId())).thenReturn(oidcAuthProvider); + when(authService.lookupUser(identifier)).thenReturn(user); + when(userService.updateLastApiUseTime(user)).thenReturn(user); + + // when (let's do this again, but now with the actual subject under test!) + User lookedUpUser = bearerTokenAuthMechanism.findUserFromRequest(request); + + // then + assertNotNull(lookedUpUser); + assertEquals(user, lookedUpUser); } } \ No newline at end of file From 564d6a73bcc0e101299c7f370bdf5fc6d42f8287 Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Mon, 22 May 2023 01:53:34 +0200 Subject: [PATCH 083/396] feat(build): make integration test using Testcontainers executable via Maven Simply call mvn verify to execute, as it is meant to be! --- pom.xml | 16 ++++++++++++++++ 1 file changed, 16 insertions(+) diff --git a/pom.xml b/pom.xml index 8764e4f493d..adda4bb31f5 100644 --- a/pom.xml +++ b/pom.xml @@ -729,6 +729,22 @@ ${skipUnitTests} + + + org.apache.maven.plugins + maven-failsafe-plugin + + testcontainers + + + + + integration-test + verify + + + + org.apache.maven.plugins maven-checkstyle-plugin From c207b3baa95a01c9d67cefd1cacaf5784bd914ff Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Mon, 22 May 2023 01:53:57 +0200 Subject: [PATCH 084/396] chore(build): update Keycloak Testcontainers version --- pom.xml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pom.xml b/pom.xml index adda4bb31f5..313f33b94b8 100644 --- a/pom.xml +++ b/pom.xml @@ -577,7 +577,7 @@ com.github.dasniko testcontainers-keycloak - 2.4.0 + 2.5.0 test From 7f8225f93c2353deb3f13e515ef1f43e9b0630c4 Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Mon, 22 May 2023 01:56:13 +0200 Subject: [PATCH 085/396] chore(build,test): add temporary servlet dependency in test scope This is necessary because the Jakarta EE 8.0.0 
API package did not properly ship the bundle files necessary for servlet execution. Not including this testing dependency leads to very cryptic errors such as MissingResourceException for Bundle with lang en-US. This should be removed once we migrate to Jakarta EE 10. --- pom.xml | 12 ++++++++++++ 1 file changed, 12 insertions(+) diff --git a/pom.xml b/pom.xml index 313f33b94b8..e9a9b9dd611 100644 --- a/pom.xml +++ b/pom.xml @@ -580,6 +580,18 @@ 2.5.0 test + + + jakarta.servlet + jakarta.servlet-api + 4.0.4 + test + + org.mockito mockito-core From ac6354645c2bc8557e4cab19d2d28304605e634b Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Mon, 22 May 2023 16:09:29 +0200 Subject: [PATCH 086/396] fix(test,oidc): replace Keycloak Demo/Dev realm The realm used before (created with Keycloak 16) was for some reason not compatible with Keycloak 20+. A new "Test" realm was created with more users and works with Keycloak 20 and 21. All files used to run Keycloak have been updated to the new version and realm import file. 
--- conf/keycloak/docker-compose.yml | 12 +- .../keycloak/oidc-keycloak-auth-provider.json | 2 +- conf/keycloak/oidc-realm.json | 2108 ----------------- conf/keycloak/run-keycloak.sh | 4 +- conf/keycloak/test-realm.json | 1939 +++++++++++++++ docker-compose-dev.yml | 6 +- 6 files changed, 1952 insertions(+), 2119 deletions(-) delete mode 100644 conf/keycloak/oidc-realm.json create mode 100644 conf/keycloak/test-realm.json diff --git a/conf/keycloak/docker-compose.yml b/conf/keycloak/docker-compose.yml index 2776f6572df..12b2382bd3d 100644 --- a/conf/keycloak/docker-compose.yml +++ b/conf/keycloak/docker-compose.yml @@ -3,13 +3,15 @@ version: "3.9" services: keycloak: - image: 'jboss/keycloak:16.1.1' + image: 'quay.io/keycloak/keycloak:21.0' + command: + - "start-dev" + - "--import-realm" environment: - - KEYCLOAK_USER=kcadmin - - KEYCLOAK_PASSWORD=kcpassword - - KEYCLOAK_IMPORT=/tmp/oidc-realm.json + - KEYCLOAK_ADMIN=kcadmin + - KEYCLOAK_ADMIN_PASSWORD=kcpassword - KEYCLOAK_LOGLEVEL=DEBUG ports: - "8090:8080" volumes: - - './oidc-realm.json:/tmp/oidc-realm.json' + - './test-realm.json:/opt/keycloak/data/import/test-realm.json' diff --git a/conf/keycloak/oidc-keycloak-auth-provider.json b/conf/keycloak/oidc-keycloak-auth-provider.json index 7d09fe5f36e..7e01bd4c325 100644 --- a/conf/keycloak/oidc-keycloak-auth-provider.json +++ b/conf/keycloak/oidc-keycloak-auth-provider.json @@ -3,6 +3,6 @@ "factoryAlias": "oidc", "title": "OIDC-Keycloak", "subtitle": "OIDC-Keycloak", - "factoryData": "type: oidc | issuer: http://keycloak.mydomain.com:8090/realms/oidc-realm | clientId: oidc-client | clientSecret: ss6gE8mODCDfqesQaSG3gwUwZqZt547E", + "factoryData": "type: oidc | issuer: http://keycloak.mydomain.com:8090/realms/test | clientId: test | clientSecret: 94XHrfNRwXsjqTqApRrwWmhDLDHpIYV8", "enabled": true } diff --git a/conf/keycloak/oidc-realm.json b/conf/keycloak/oidc-realm.json deleted file mode 100644 index 1b77f2b4384..00000000000 --- a/conf/keycloak/oidc-realm.json 
+++ /dev/null @@ -1,2108 +0,0 @@ -{ - "id": "oidc-realm", - "realm": "oidc-realm", - "notBefore": 0, - "defaultSignatureAlgorithm": "RS256", - "revokeRefreshToken": false, - "refreshTokenMaxReuse": 0, - "accessTokenLifespan": 300, - "accessTokenLifespanForImplicitFlow": 900, - "ssoSessionIdleTimeout": 1800, - "ssoSessionMaxLifespan": 36000, - "ssoSessionIdleTimeoutRememberMe": 0, - "ssoSessionMaxLifespanRememberMe": 0, - "offlineSessionIdleTimeout": 2592000, - "offlineSessionMaxLifespanEnabled": false, - "offlineSessionMaxLifespan": 5184000, - "clientSessionIdleTimeout": 0, - "clientSessionMaxLifespan": 0, - "clientOfflineSessionIdleTimeout": 0, - "clientOfflineSessionMaxLifespan": 0, - "accessCodeLifespan": 60, - "accessCodeLifespanUserAction": 300, - "accessCodeLifespanLogin": 1800, - "actionTokenGeneratedByAdminLifespan": 43200, - "actionTokenGeneratedByUserLifespan": 300, - "oauth2DeviceCodeLifespan": 600, - "oauth2DevicePollingInterval": 5, - "enabled": true, - "sslRequired": "external", - "registrationAllowed": false, - "registrationEmailAsUsername": false, - "rememberMe": false, - "verifyEmail": false, - "loginWithEmailAllowed": true, - "duplicateEmailsAllowed": false, - "resetPasswordAllowed": false, - "editUsernameAllowed": false, - "bruteForceProtected": false, - "permanentLockout": false, - "maxFailureWaitSeconds": 900, - "minimumQuickLoginWaitSeconds": 60, - "waitIncrementSeconds": 60, - "quickLoginCheckMilliSeconds": 1000, - "maxDeltaTimeSeconds": 43200, - "failureFactor": 30, - "roles": { - "realm": [ - { - "id": "13d76240-fcf8-4361-9dbf-de268717cfb2", - "name": "uma_authorization", - "description": "${role_uma_authorization}", - "composite": false, - "clientRole": false, - "containerId": "oidc-realm", - "attributes": {} - }, - { - "id": "88b414c4-3516-4486-8f8b-a811ed0e0ce5", - "name": "default-roles-oidc-realm", - "description": "${role_default-roles}", - "composite": true, - "composites": { - "realm": [ - "offline_access", - "uma_authorization" - ] 
- }, - "clientRole": false, - "containerId": "oidc-realm", - "attributes": {} - }, - { - "id": "b907fd4e-0e54-461c-9411-3f736eef7d2f", - "name": "offline_access", - "description": "${role_offline-access}", - "composite": false, - "clientRole": false, - "containerId": "oidc-realm", - "attributes": {} - } - ], - "client": { - "realm-management": [ - { - "id": "39342ea9-0b4e-4841-8996-433759e9297f", - "name": "create-client", - "description": "${role_create-client}", - "composite": false, - "clientRole": true, - "containerId": "43ffb712-f233-48e2-ae79-d6993bac34a5", - "attributes": {} - }, - { - "id": "f8680034-617d-45d3-9801-7bf0d704c549", - "name": "manage-users", - "description": "${role_manage-users}", - "composite": false, - "clientRole": true, - "containerId": "43ffb712-f233-48e2-ae79-d6993bac34a5", - "attributes": {} - }, - { - "id": "b08e4cc3-71e2-4395-b66b-fb1277b48b88", - "name": "manage-realm", - "description": "${role_manage-realm}", - "composite": false, - "clientRole": true, - "containerId": "43ffb712-f233-48e2-ae79-d6993bac34a5", - "attributes": {} - }, - { - "id": "c15dc407-d012-43af-9a21-a2923e1d7b74", - "name": "manage-events", - "description": "${role_manage-events}", - "composite": false, - "clientRole": true, - "containerId": "43ffb712-f233-48e2-ae79-d6993bac34a5", - "attributes": {} - }, - { - "id": "66c07cb7-42cd-4155-8485-6cc7bd37cba9", - "name": "view-realm", - "description": "${role_view-realm}", - "composite": false, - "clientRole": true, - "containerId": "43ffb712-f233-48e2-ae79-d6993bac34a5", - "attributes": {} - }, - { - "id": "0419515f-4ab8-43ca-ac69-e842195813c0", - "name": "view-events", - "description": "${role_view-events}", - "composite": false, - "clientRole": true, - "containerId": "43ffb712-f233-48e2-ae79-d6993bac34a5", - "attributes": {} - }, - { - "id": "aa553d5a-b2dc-4f81-979a-2af0a019fee0", - "name": "impersonation", - "description": "${role_impersonation}", - "composite": false, - "clientRole": true, - "containerId": 
"43ffb712-f233-48e2-ae79-d6993bac34a5", - "attributes": {} - }, - { - "id": "9567e1e9-b755-43a8-93ed-d5929391316f", - "name": "manage-clients", - "description": "${role_manage-clients}", - "composite": false, - "clientRole": true, - "containerId": "43ffb712-f233-48e2-ae79-d6993bac34a5", - "attributes": {} - }, - { - "id": "e3dab69f-7323-4aad-bf98-8b7697f36d57", - "name": "query-users", - "description": "${role_query-users}", - "composite": false, - "clientRole": true, - "containerId": "43ffb712-f233-48e2-ae79-d6993bac34a5", - "attributes": {} - }, - { - "id": "ee8a4855-d0d5-4261-bdba-b419d304a824", - "name": "query-groups", - "description": "${role_query-groups}", - "composite": false, - "clientRole": true, - "containerId": "43ffb712-f233-48e2-ae79-d6993bac34a5", - "attributes": {} - }, - { - "id": "4f251212-e922-4ac0-9cce-3ada607648d2", - "name": "view-identity-providers", - "description": "${role_view-identity-providers}", - "composite": false, - "clientRole": true, - "containerId": "43ffb712-f233-48e2-ae79-d6993bac34a5", - "attributes": {} - }, - { - "id": "34e1dc59-a975-424f-887b-52465e184a4b", - "name": "realm-admin", - "description": "${role_realm-admin}", - "composite": true, - "composites": { - "client": { - "realm-management": [ - "create-client", - "manage-users", - "manage-realm", - "manage-events", - "view-realm", - "view-events", - "impersonation", - "manage-clients", - "query-users", - "view-identity-providers", - "query-groups", - "view-clients", - "view-users", - "manage-authorization", - "manage-identity-providers", - "query-realms", - "query-clients", - "view-authorization" - ] - } - }, - "clientRole": true, - "containerId": "43ffb712-f233-48e2-ae79-d6993bac34a5", - "attributes": {} - }, - { - "id": "d35aca04-0182-40d3-96b8-1ce5cc118729", - "name": "view-clients", - "description": "${role_view-clients}", - "composite": true, - "composites": { - "client": { - "realm-management": [ - "query-clients" - ] - } - }, - "clientRole": true, - 
"containerId": "43ffb712-f233-48e2-ae79-d6993bac34a5", - "attributes": {} - }, - { - "id": "7d3b28d5-471a-4b2b-bc80-56d4ff80fd28", - "name": "view-users", - "description": "${role_view-users}", - "composite": true, - "composites": { - "client": { - "realm-management": [ - "query-users", - "query-groups" - ] - } - }, - "clientRole": true, - "containerId": "43ffb712-f233-48e2-ae79-d6993bac34a5", - "attributes": {} - }, - { - "id": "651059eb-fc1a-4f8d-9ced-ed28b0a2f965", - "name": "manage-authorization", - "description": "${role_manage-authorization}", - "composite": false, - "clientRole": true, - "containerId": "43ffb712-f233-48e2-ae79-d6993bac34a5", - "attributes": {} - }, - { - "id": "73f447e9-def8-4214-8516-56571f2c6f65", - "name": "manage-identity-providers", - "description": "${role_manage-identity-providers}", - "composite": false, - "clientRole": true, - "containerId": "43ffb712-f233-48e2-ae79-d6993bac34a5", - "attributes": {} - }, - { - "id": "1b5f7c39-885e-4246-8cf5-25769544fc3d", - "name": "query-realms", - "description": "${role_query-realms}", - "composite": false, - "clientRole": true, - "containerId": "43ffb712-f233-48e2-ae79-d6993bac34a5", - "attributes": {} - }, - { - "id": "350da4c1-69d4-4557-a9a8-8ba760db0225", - "name": "query-clients", - "description": "${role_query-clients}", - "composite": false, - "clientRole": true, - "containerId": "43ffb712-f233-48e2-ae79-d6993bac34a5", - "attributes": {} - }, - { - "id": "43d51082-6922-4765-8022-529d91a4603f", - "name": "view-authorization", - "description": "${role_view-authorization}", - "composite": false, - "clientRole": true, - "containerId": "43ffb712-f233-48e2-ae79-d6993bac34a5", - "attributes": {} - } - ], - "security-admin-console": [], - "admin-cli": [], - "account-console": [], - "broker": [], - "oidc-client": [], - "account": [ - { - "id": "a163535c-71de-4b2d-9530-26b25eeb1c1e", - "name": "delete-account", - "description": "${role_delete-account}", - "composite": false, - "clientRole": true, - 
"containerId": "aed2e103-ee29-4d5c-a34e-1b8c65b7d537", - "attributes": {} - }, - { - "id": "851c6a9f-bce7-4c70-be82-084c25d61b25", - "name": "manage-account", - "composite": false, - "clientRole": true, - "containerId": "aed2e103-ee29-4d5c-a34e-1b8c65b7d537", - "attributes": {} - } - ] - } - }, - "groups": [], - "defaultRole": { - "id": "88b414c4-3516-4486-8f8b-a811ed0e0ce5", - "name": "default-roles-oidc-realm", - "description": "${role_default-roles}", - "composite": true, - "clientRole": false, - "containerId": "oidc-realm" - }, - "requiredCredentials": [ - "password" - ], - "otpPolicyType": "totp", - "otpPolicyAlgorithm": "HmacSHA1", - "otpPolicyInitialCounter": 0, - "otpPolicyDigits": 6, - "otpPolicyLookAheadWindow": 1, - "otpPolicyPeriod": 30, - "otpSupportedApplications": [ - "FreeOTP", - "Google Authenticator" - ], - "webAuthnPolicyRpEntityName": "keycloak", - "webAuthnPolicySignatureAlgorithms": [ - "ES256" - ], - "webAuthnPolicyRpId": "", - "webAuthnPolicyAttestationConveyancePreference": "not specified", - "webAuthnPolicyAuthenticatorAttachment": "not specified", - "webAuthnPolicyRequireResidentKey": "not specified", - "webAuthnPolicyUserVerificationRequirement": "not specified", - "webAuthnPolicyCreateTimeout": 0, - "webAuthnPolicyAvoidSameAuthenticatorRegister": false, - "webAuthnPolicyAcceptableAaguids": [], - "webAuthnPolicyPasswordlessRpEntityName": "keycloak", - "webAuthnPolicyPasswordlessSignatureAlgorithms": [ - "ES256" - ], - "webAuthnPolicyPasswordlessRpId": "", - "webAuthnPolicyPasswordlessAttestationConveyancePreference": "not specified", - "webAuthnPolicyPasswordlessAuthenticatorAttachment": "not specified", - "webAuthnPolicyPasswordlessRequireResidentKey": "not specified", - "webAuthnPolicyPasswordlessUserVerificationRequirement": "not specified", - "webAuthnPolicyPasswordlessCreateTimeout": 0, - "webAuthnPolicyPasswordlessAvoidSameAuthenticatorRegister": false, - "webAuthnPolicyPasswordlessAcceptableAaguids": [], - "users": [ - { - 
"username": "kcuser", - "enabled": true, - "totp": false, - "emailVerified": true, - "firstName": "Test", - "lastName": "Test", - "email": "test@test.com", - "credentials": [ - { - "type": "password", - "value": "kcpassword" - } - ] - } - ], - "scopeMappings": [ - { - "clientScope": "offline_access", - "roles": [ - "offline_access" - ] - } - ], - "clientScopeMappings": { - "account": [ - { - "client": "account-console", - "roles": [ - "manage-account" - ] - } - ] - }, - "clients": [ - { - "id": "aed2e103-ee29-4d5c-a34e-1b8c65b7d537", - "clientId": "account", - "name": "${client_account}", - "rootUrl": "${authBaseUrl}", - "baseUrl": "/realms/oidc-realm/account/", - "surrogateAuthRequired": false, - "enabled": true, - "alwaysDisplayInConsole": false, - "clientAuthenticatorType": "client-secret", - "redirectUris": [ - "/realms/oidc-realm/account/*" - ], - "webOrigins": [], - "notBefore": 0, - "bearerOnly": false, - "consentRequired": false, - "standardFlowEnabled": true, - "implicitFlowEnabled": false, - "directAccessGrantsEnabled": false, - "serviceAccountsEnabled": false, - "publicClient": true, - "frontchannelLogout": false, - "protocol": "openid-connect", - "attributes": {}, - "authenticationFlowBindingOverrides": {}, - "fullScopeAllowed": false, - "nodeReRegistrationTimeout": 0, - "defaultClientScopes": [ - "web-origins", - "roles", - "profile", - "email" - ], - "optionalClientScopes": [ - "address", - "phone", - "offline_access", - "microprofile-jwt" - ] - }, - { - "id": "1e821c0e-f6b9-4324-9b23-e82b5431fb72", - "clientId": "account-console", - "name": "${client_account-console}", - "rootUrl": "${authBaseUrl}", - "baseUrl": "/realms/oidc-realm/account/", - "surrogateAuthRequired": false, - "enabled": true, - "alwaysDisplayInConsole": false, - "clientAuthenticatorType": "client-secret", - "redirectUris": [ - "/realms/oidc-realm/account/*" - ], - "webOrigins": [], - "notBefore": 0, - "bearerOnly": false, - "consentRequired": false, - "standardFlowEnabled": true, - 
"implicitFlowEnabled": false, - "directAccessGrantsEnabled": false, - "serviceAccountsEnabled": false, - "publicClient": true, - "frontchannelLogout": false, - "protocol": "openid-connect", - "attributes": { - "pkce.code.challenge.method": "S256" - }, - "authenticationFlowBindingOverrides": {}, - "fullScopeAllowed": false, - "nodeReRegistrationTimeout": 0, - "protocolMappers": [ - { - "id": "397616ab-4124-4a13-92b6-317423e818a3", - "name": "audience resolve", - "protocol": "openid-connect", - "protocolMapper": "oidc-audience-resolve-mapper", - "consentRequired": false, - "config": {} - } - ], - "defaultClientScopes": [ - "web-origins", - "roles", - "profile", - "email" - ], - "optionalClientScopes": [ - "address", - "phone", - "offline_access", - "microprofile-jwt" - ] - }, - { - "id": "dddcc3e0-d742-422b-8b5f-84a292ea9d66", - "clientId": "admin-cli", - "name": "${client_admin-cli}", - "surrogateAuthRequired": false, - "enabled": true, - "alwaysDisplayInConsole": false, - "clientAuthenticatorType": "client-secret", - "redirectUris": [], - "webOrigins": [], - "notBefore": 0, - "bearerOnly": false, - "consentRequired": false, - "standardFlowEnabled": false, - "implicitFlowEnabled": false, - "directAccessGrantsEnabled": true, - "serviceAccountsEnabled": false, - "publicClient": true, - "frontchannelLogout": false, - "protocol": "openid-connect", - "attributes": {}, - "authenticationFlowBindingOverrides": {}, - "fullScopeAllowed": false, - "nodeReRegistrationTimeout": 0, - "defaultClientScopes": [ - "web-origins", - "roles", - "profile", - "email" - ], - "optionalClientScopes": [ - "address", - "phone", - "offline_access", - "microprofile-jwt" - ] - }, - { - "id": "df6f6cd0-a046-492f-84ac-b4fe31909be4", - "clientId": "broker", - "name": "${client_broker}", - "surrogateAuthRequired": false, - "enabled": true, - "alwaysDisplayInConsole": false, - "clientAuthenticatorType": "client-secret", - "redirectUris": [], - "webOrigins": [], - "notBefore": 0, - "bearerOnly": true, 
- "consentRequired": false, - "standardFlowEnabled": true, - "implicitFlowEnabled": false, - "directAccessGrantsEnabled": false, - "serviceAccountsEnabled": false, - "publicClient": false, - "frontchannelLogout": false, - "protocol": "openid-connect", - "attributes": {}, - "authenticationFlowBindingOverrides": {}, - "fullScopeAllowed": false, - "nodeReRegistrationTimeout": 0, - "defaultClientScopes": [ - "web-origins", - "roles", - "profile", - "email" - ], - "optionalClientScopes": [ - "address", - "phone", - "offline_access", - "microprofile-jwt" - ] - }, - { - "id": "c0af31b9-21aa-4e70-baf3-8d68850c4081", - "clientId": "oidc-client", - "surrogateAuthRequired": false, - "enabled": true, - "alwaysDisplayInConsole": false, - "clientAuthenticatorType": "client-secret", - "secret": "ss6gE8mODCDfqesQaSG3gwUwZqZt547E", - "redirectUris": [ - "*" - ], - "webOrigins": [ - "+" - ], - "notBefore": 0, - "bearerOnly": false, - "consentRequired": false, - "standardFlowEnabled": true, - "implicitFlowEnabled": false, - "directAccessGrantsEnabled": false, - "serviceAccountsEnabled": false, - "publicClient": false, - "frontchannelLogout": false, - "protocol": "openid-connect", - "attributes": { - "saml.force.post.binding": "false", - "saml.multivalued.roles": "false", - "oauth2.device.authorization.grant.enabled": "false", - "use.jwks.url": "true", - "backchannel.logout.revoke.offline.tokens": "false", - "saml.server.signature.keyinfo.ext": "false", - "use.refresh.tokens": "true", - "jwt.credential.certificate": 
"MIICpTCCAY0CBgGE8V6o6TANBgkqhkiG9w0BAQsFADAWMRQwEgYDVQQDDAtvaWRjLWNsaWVudDAeFw0yMjEyMDgxMDUyMDNaFw0zMjEyMDgxMDUzNDNaMBYxFDASBgNVBAMMC29pZGMtY2xpZW50MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEArUffTl+jXWzyY3T4VVtkiGyNnY+RgyAXUzz+dxT7wUQaYSiNPvmaxnio555pWjR403SRUjVxM8eJYgHK9s43qQWdheXBIHyLKaQfjVsTtSmHgFtPmjk+kweQs6fxUi5CNvtx4RTCaOK5wV8q5q1X7mb8cZ5+gLSx1f/pHtayFXMT75nV04aZKWgPztPz8w+QXUx9cuFY4OIiTdRbdyfr1oOiDtMbxxA22tggB/HSMVkSckT3LSPj7fJKJMPFYi/g1AXxGipX/q8XkmOBrvNePCpH0F/IZbC1vXEsDC6urfoijOdiZgPMobuADmWHPiw2zgCN8qa6QuLFaI+JduXT9QIDAQABMA0GCSqGSIb3DQEBCwUAA4IBAQCEOYRHkH8DnBucb+uN5c9U/fZY+mpglxzZvby7dGBXfVwLN+eP1kGcQPaFi+nshk7FgF4mR5/cmuAPZt+YBbgP0z37D49nB7S6sniwzfhCAAplOT4vmm+MjperTDsWFUGhQZJvN/jxqP2Xccw7N//ReYi7yOlmWhwGyqQyTi0ySbE3BY5eFvUKepekybYi/15XlyF8lwS2jH1MvnJAxAMNVpVUcP4wTnq/dOw5ybrVWF0mPnA8KVzTPuPE5nzZvZ3rkXQeEJTffIToR+T/DH/KTLXcNUtx4nG0ajJ0gM6iVAXGnKlI9Viq/M5Ese+52I6rQmxTsFMn57LNzKgMpWcE", - "oidc.ciba.grant.enabled": "false", - "use.jwks.string": "false", - "backchannel.logout.session.required": "false", - "client_credentials.use_refresh_token": "false", - "require.pushed.authorization.requests": "false", - "saml.client.signature": "false", - "id.token.as.detached.signature": "false", - "saml.assertion.signature": "false", - "saml.encrypt": "false", - "saml.server.signature": "false", - "exclude.session.state.from.auth.response": "false", - "saml.artifact.binding": "false", - "saml_force_name_id_format": "false", - "tls.client.certificate.bound.access.tokens": "false", - "saml.authnstatement": "false", - "display.on.consent.screen": "false", - "saml.onetimeuse.condition": "false" - }, - "authenticationFlowBindingOverrides": {}, - "fullScopeAllowed": true, - "nodeReRegistrationTimeout": -1, - "defaultClientScopes": [ - "web-origins", - "roles", - "profile", - "email" - ], - "optionalClientScopes": [ - "address", - "phone", - "offline_access", - "microprofile-jwt" - ] - }, - { - "id": "43ffb712-f233-48e2-ae79-d6993bac34a5", - "clientId": "realm-management", 
- "name": "${client_realm-management}", - "surrogateAuthRequired": false, - "enabled": true, - "alwaysDisplayInConsole": false, - "clientAuthenticatorType": "client-secret", - "redirectUris": [], - "webOrigins": [], - "notBefore": 0, - "bearerOnly": true, - "consentRequired": false, - "standardFlowEnabled": true, - "implicitFlowEnabled": false, - "directAccessGrantsEnabled": false, - "serviceAccountsEnabled": false, - "publicClient": false, - "frontchannelLogout": false, - "protocol": "openid-connect", - "attributes": {}, - "authenticationFlowBindingOverrides": {}, - "fullScopeAllowed": false, - "nodeReRegistrationTimeout": 0, - "defaultClientScopes": [ - "web-origins", - "roles", - "profile", - "email" - ], - "optionalClientScopes": [ - "address", - "phone", - "offline_access", - "microprofile-jwt" - ] - }, - { - "id": "3747f98f-efbb-49ef-8238-a349bf5ab409", - "clientId": "security-admin-console", - "name": "${client_security-admin-console}", - "rootUrl": "${authAdminUrl}", - "baseUrl": "/admin/oidc-realm/console/", - "surrogateAuthRequired": false, - "enabled": true, - "alwaysDisplayInConsole": false, - "clientAuthenticatorType": "client-secret", - "redirectUris": [ - "/admin/oidc-realm/console/*" - ], - "webOrigins": [ - "+" - ], - "notBefore": 0, - "bearerOnly": false, - "consentRequired": false, - "standardFlowEnabled": true, - "implicitFlowEnabled": false, - "directAccessGrantsEnabled": false, - "serviceAccountsEnabled": false, - "publicClient": true, - "frontchannelLogout": false, - "protocol": "openid-connect", - "attributes": { - "pkce.code.challenge.method": "S256" - }, - "authenticationFlowBindingOverrides": {}, - "fullScopeAllowed": false, - "nodeReRegistrationTimeout": 0, - "protocolMappers": [ - { - "id": "2fbdf6c9-ee69-4edc-b780-ec62aecfc519", - "name": "locale", - "protocol": "openid-connect", - "protocolMapper": "oidc-usermodel-attribute-mapper", - "consentRequired": false, - "config": { - "userinfo.token.claim": "true", - "user.attribute": 
"locale", - "id.token.claim": "true", - "access.token.claim": "true", - "claim.name": "locale", - "jsonType.label": "String" - } - } - ], - "defaultClientScopes": [ - "web-origins", - "roles", - "profile", - "email" - ], - "optionalClientScopes": [ - "address", - "phone", - "offline_access", - "microprofile-jwt" - ] - } - ], - "clientScopes": [ - { - "id": "f76f507d-7d1c-495b-9504-47830b3834f1", - "name": "phone", - "description": "OpenID Connect built-in scope: phone", - "protocol": "openid-connect", - "attributes": { - "include.in.token.scope": "true", - "display.on.consent.screen": "true", - "consent.screen.text": "${phoneScopeConsentText}" - }, - "protocolMappers": [ - { - "id": "be849ec8-1747-4efb-bc00-beeaf44f11c8", - "name": "phone number verified", - "protocol": "openid-connect", - "protocolMapper": "oidc-usermodel-attribute-mapper", - "consentRequired": false, - "config": { - "userinfo.token.claim": "true", - "user.attribute": "phoneNumberVerified", - "id.token.claim": "true", - "access.token.claim": "true", - "claim.name": "phone_number_verified", - "jsonType.label": "boolean" - } - }, - { - "id": "8e8600ec-4290-435d-b109-9f0547cb4a1d", - "name": "phone number", - "protocol": "openid-connect", - "protocolMapper": "oidc-usermodel-attribute-mapper", - "consentRequired": false, - "config": { - "userinfo.token.claim": "true", - "user.attribute": "phoneNumber", - "id.token.claim": "true", - "access.token.claim": "true", - "claim.name": "phone_number", - "jsonType.label": "String" - } - } - ] - }, - { - "id": "54b87197-5309-4b2c-8ad9-f561a0fc178a", - "name": "role_list", - "description": "SAML role list", - "protocol": "saml", - "attributes": { - "consent.screen.text": "${samlRoleListScopeConsentText}", - "display.on.consent.screen": "true" - }, - "protocolMappers": [ - { - "id": "5fd831af-19a5-4a9c-b44f-2a806fae011c", - "name": "role list", - "protocol": "saml", - "protocolMapper": "saml-role-list-mapper", - "consentRequired": false, - "config": { - "single": 
"false", - "attribute.nameformat": "Basic", - "attribute.name": "Role" - } - } - ] - }, - { - "id": "2f85470d-8cb7-4f07-8602-47342d68af86", - "name": "web-origins", - "description": "OpenID Connect scope for add allowed web origins to the access token", - "protocol": "openid-connect", - "attributes": { - "include.in.token.scope": "false", - "display.on.consent.screen": "false", - "consent.screen.text": "" - }, - "protocolMappers": [ - { - "id": "c5d2aafc-f72d-4d7b-9d88-cd759f0e045e", - "name": "allowed web origins", - "protocol": "openid-connect", - "protocolMapper": "oidc-allowed-origins-mapper", - "consentRequired": false, - "config": {} - } - ] - }, - { - "id": "528face9-229a-4adf-98d8-68b1a22e880d", - "name": "microprofile-jwt", - "description": "Microprofile - JWT built-in scope", - "protocol": "openid-connect", - "attributes": { - "include.in.token.scope": "true", - "display.on.consent.screen": "false" - }, - "protocolMappers": [ - { - "id": "89240a7c-10f3-4e09-9d6b-41955b86c58d", - "name": "groups", - "protocol": "openid-connect", - "protocolMapper": "oidc-usermodel-realm-role-mapper", - "consentRequired": false, - "config": { - "multivalued": "true", - "userinfo.token.claim": "true", - "user.attribute": "foo", - "id.token.claim": "true", - "access.token.claim": "true", - "claim.name": "groups", - "jsonType.label": "String" - } - }, - { - "id": "15b6db72-4870-480e-a675-87f87df5f8a5", - "name": "upn", - "protocol": "openid-connect", - "protocolMapper": "oidc-usermodel-property-mapper", - "consentRequired": false, - "config": { - "userinfo.token.claim": "true", - "user.attribute": "username", - "id.token.claim": "true", - "access.token.claim": "true", - "claim.name": "upn", - "jsonType.label": "String" - } - } - ] - }, - { - "id": "cdd11477-b02b-4886-bc6d-cf4b728ebc0e", - "name": "email", - "description": "OpenID Connect built-in scope: email", - "protocol": "openid-connect", - "attributes": { - "include.in.token.scope": "true", - "display.on.consent.screen": 
"true", - "consent.screen.text": "${emailScopeConsentText}" - }, - "protocolMappers": [ - { - "id": "627b9f4f-23d6-4480-adf4-264faf58de33", - "name": "email verified", - "protocol": "openid-connect", - "protocolMapper": "oidc-usermodel-property-mapper", - "consentRequired": false, - "config": { - "userinfo.token.claim": "true", - "user.attribute": "emailVerified", - "id.token.claim": "true", - "access.token.claim": "true", - "claim.name": "email_verified", - "jsonType.label": "boolean" - } - }, - { - "id": "6a2adf2e-db2d-4ebe-8d48-f658f9b4a5ca", - "name": "email", - "protocol": "openid-connect", - "protocolMapper": "oidc-usermodel-property-mapper", - "consentRequired": false, - "config": { - "userinfo.token.claim": "true", - "user.attribute": "email", - "id.token.claim": "true", - "access.token.claim": "true", - "claim.name": "email", - "jsonType.label": "String" - } - } - ] - }, - { - "id": "8f830142-b3f1-40f0-82e2-ceed68857a40", - "name": "roles", - "description": "OpenID Connect scope for add user roles to the access token", - "protocol": "openid-connect", - "attributes": { - "include.in.token.scope": "false", - "display.on.consent.screen": "true", - "consent.screen.text": "${rolesScopeConsentText}" - }, - "protocolMappers": [ - { - "id": "28a96dc6-c4dc-4aae-b316-28b56dccd077", - "name": "audience resolve", - "protocol": "openid-connect", - "protocolMapper": "oidc-audience-resolve-mapper", - "consentRequired": false, - "config": {} - }, - { - "id": "3e81050f-540e-4f3d-9abf-86406e484f76", - "name": "realm roles", - "protocol": "openid-connect", - "protocolMapper": "oidc-usermodel-realm-role-mapper", - "consentRequired": false, - "config": { - "user.attribute": "foo", - "access.token.claim": "true", - "claim.name": "realm_access.roles", - "jsonType.label": "String", - "multivalued": "true" - } - }, - { - "id": "13afa1f4-3fac-4c90-a9b4-e84e682f46e9", - "name": "client roles", - "protocol": "openid-connect", - "protocolMapper": "oidc-usermodel-client-role-mapper", - 
"consentRequired": false, - "config": { - "user.attribute": "foo", - "access.token.claim": "true", - "claim.name": "resource_access.${client_id}.roles", - "jsonType.label": "String", - "multivalued": "true" - } - } - ] - }, - { - "id": "3beac2fc-e947-408f-8422-ca9a1e66a258", - "name": "address", - "description": "OpenID Connect built-in scope: address", - "protocol": "openid-connect", - "attributes": { - "include.in.token.scope": "true", - "display.on.consent.screen": "true", - "consent.screen.text": "${addressScopeConsentText}" - }, - "protocolMappers": [ - { - "id": "12911891-db5c-4a35-80fa-555c5eda7e68", - "name": "address", - "protocol": "openid-connect", - "protocolMapper": "oidc-address-mapper", - "consentRequired": false, - "config": { - "user.attribute.formatted": "formatted", - "user.attribute.country": "country", - "user.attribute.postal_code": "postal_code", - "userinfo.token.claim": "true", - "user.attribute.street": "street", - "id.token.claim": "true", - "user.attribute.region": "region", - "access.token.claim": "true", - "user.attribute.locality": "locality" - } - } - ] - }, - { - "id": "8a29297a-e6f6-41ae-b25d-8a14236de535", - "name": "offline_access", - "description": "OpenID Connect built-in scope: offline_access", - "protocol": "openid-connect", - "attributes": { - "consent.screen.text": "${offlineAccessScopeConsentText}", - "display.on.consent.screen": "true" - } - }, - { - "id": "ce1622c5-701f-4e3e-9d2d-8dae0f07a295", - "name": "profile", - "description": "OpenID Connect built-in scope: profile", - "protocol": "openid-connect", - "attributes": { - "include.in.token.scope": "true", - "display.on.consent.screen": "true", - "consent.screen.text": "${profileScopeConsentText}" - }, - "protocolMappers": [ - { - "id": "98cc62b8-250a-4087-92da-bb0f0931e675", - "name": "full name", - "protocol": "openid-connect", - "protocolMapper": "oidc-full-name-mapper", - "consentRequired": false, - "config": { - "id.token.claim": "true", - "access.token.claim": 
"true", - "userinfo.token.claim": "true" - } - }, - { - "id": "b99c8c44-4cc9-4c87-a5a1-c14e64d472ae", - "name": "given name", - "protocol": "openid-connect", - "protocolMapper": "oidc-usermodel-property-mapper", - "consentRequired": false, - "config": { - "userinfo.token.claim": "true", - "user.attribute": "firstName", - "id.token.claim": "true", - "access.token.claim": "true", - "claim.name": "given_name", - "jsonType.label": "String" - } - }, - { - "id": "903d5932-bdec-42bc-a53c-3cce93deaa1c", - "name": "zoneinfo", - "protocol": "openid-connect", - "protocolMapper": "oidc-usermodel-attribute-mapper", - "consentRequired": false, - "config": { - "userinfo.token.claim": "true", - "user.attribute": "zoneinfo", - "id.token.claim": "true", - "access.token.claim": "true", - "claim.name": "zoneinfo", - "jsonType.label": "String" - } - }, - { - "id": "ccbdc095-28f7-4769-8261-2e32c7b6fab0", - "name": "picture", - "protocol": "openid-connect", - "protocolMapper": "oidc-usermodel-attribute-mapper", - "consentRequired": false, - "config": { - "userinfo.token.claim": "true", - "user.attribute": "picture", - "id.token.claim": "true", - "access.token.claim": "true", - "claim.name": "picture", - "jsonType.label": "String" - } - }, - { - "id": "22a4a38c-f755-44f3-b847-803c7fb3cef5", - "name": "birthdate", - "protocol": "openid-connect", - "protocolMapper": "oidc-usermodel-attribute-mapper", - "consentRequired": false, - "config": { - "userinfo.token.claim": "true", - "user.attribute": "birthdate", - "id.token.claim": "true", - "access.token.claim": "true", - "claim.name": "birthdate", - "jsonType.label": "String" - } - }, - { - "id": "78726920-b4e2-4ed2-b9e0-df38a7f82376", - "name": "updated at", - "protocol": "openid-connect", - "protocolMapper": "oidc-usermodel-attribute-mapper", - "consentRequired": false, - "config": { - "userinfo.token.claim": "true", - "user.attribute": "updatedAt", - "id.token.claim": "true", - "access.token.claim": "true", - "claim.name": "updated_at", - 
"jsonType.label": "String" - } - }, - { - "id": "c64c6eb8-5cbe-4092-bf2c-dd02b8c0e0e8", - "name": "family name", - "protocol": "openid-connect", - "protocolMapper": "oidc-usermodel-property-mapper", - "consentRequired": false, - "config": { - "userinfo.token.claim": "true", - "user.attribute": "lastName", - "id.token.claim": "true", - "access.token.claim": "true", - "claim.name": "family_name", - "jsonType.label": "String" - } - }, - { - "id": "306784d8-8da1-48d8-92a3-dccfff83bcaf", - "name": "middle name", - "protocol": "openid-connect", - "protocolMapper": "oidc-usermodel-attribute-mapper", - "consentRequired": false, - "config": { - "userinfo.token.claim": "true", - "user.attribute": "middleName", - "id.token.claim": "true", - "access.token.claim": "true", - "claim.name": "middle_name", - "jsonType.label": "String" - } - }, - { - "id": "0ff127fa-774e-43a8-a1fc-47ea3f307aa1", - "name": "website", - "protocol": "openid-connect", - "protocolMapper": "oidc-usermodel-attribute-mapper", - "consentRequired": false, - "config": { - "userinfo.token.claim": "true", - "user.attribute": "website", - "id.token.claim": "true", - "access.token.claim": "true", - "claim.name": "website", - "jsonType.label": "String" - } - }, - { - "id": "8989c6f8-25c5-4d02-aa06-25b3b77fc227", - "name": "profile", - "protocol": "openid-connect", - "protocolMapper": "oidc-usermodel-attribute-mapper", - "consentRequired": false, - "config": { - "userinfo.token.claim": "true", - "user.attribute": "profile", - "id.token.claim": "true", - "access.token.claim": "true", - "claim.name": "profile", - "jsonType.label": "String" - } - }, - { - "id": "3b67000c-9cbf-43ee-9e05-26f560871897", - "name": "gender", - "protocol": "openid-connect", - "protocolMapper": "oidc-usermodel-attribute-mapper", - "consentRequired": false, - "config": { - "userinfo.token.claim": "true", - "user.attribute": "gender", - "id.token.claim": "true", - "access.token.claim": "true", - "claim.name": "gender", - "jsonType.label": 
"String" - } - }, - { - "id": "c28b04de-2770-423e-9b9a-b3321d7300e2", - "name": "nickname", - "protocol": "openid-connect", - "protocolMapper": "oidc-usermodel-attribute-mapper", - "consentRequired": false, - "config": { - "userinfo.token.claim": "true", - "user.attribute": "nickname", - "id.token.claim": "true", - "access.token.claim": "true", - "claim.name": "nickname", - "jsonType.label": "String" - } - }, - { - "id": "fd791ed4-d4ab-4df9-81b4-c69a3134bcab", - "name": "username", - "protocol": "openid-connect", - "protocolMapper": "oidc-usermodel-property-mapper", - "consentRequired": false, - "config": { - "userinfo.token.claim": "true", - "user.attribute": "username", - "id.token.claim": "true", - "access.token.claim": "true", - "claim.name": "preferred_username", - "jsonType.label": "String" - } - }, - { - "id": "c7378ce5-3673-47b2-9ebc-92c772bebf9f", - "name": "locale", - "protocol": "openid-connect", - "protocolMapper": "oidc-usermodel-attribute-mapper", - "consentRequired": false, - "config": { - "userinfo.token.claim": "true", - "user.attribute": "locale", - "id.token.claim": "true", - "access.token.claim": "true", - "claim.name": "locale", - "jsonType.label": "String" - } - } - ] - } - ], - "defaultDefaultClientScopes": [ - "web-origins", - "role_list", - "roles", - "email", - "profile" - ], - "defaultOptionalClientScopes": [ - "address", - "microprofile-jwt", - "offline_access", - "phone" - ], - "browserSecurityHeaders": { - "contentSecurityPolicyReportOnly": "", - "xContentTypeOptions": "nosniff", - "xRobotsTag": "none", - "xFrameOptions": "SAMEORIGIN", - "contentSecurityPolicy": "frame-src 'self'; frame-ancestors 'self'; object-src 'none';", - "xXSSProtection": "1; mode=block", - "strictTransportSecurity": "max-age=31536000; includeSubDomains" - }, - "smtpServer": {}, - "eventsEnabled": false, - "eventsListeners": [ - "jboss-logging" - ], - "enabledEventTypes": [], - "adminEventsEnabled": false, - "adminEventsDetailsEnabled": false, - 
"identityProviders": [], - "identityProviderMappers": [], - "components": { - "org.keycloak.services.clientregistration.policy.ClientRegistrationPolicy": [ - { - "id": "8e2d0c22-0627-4115-9f14-4225244333d9", - "name": "Trusted Hosts", - "providerId": "trusted-hosts", - "subType": "anonymous", - "subComponents": {}, - "config": { - "host-sending-registration-request-must-match": [ - "true" - ], - "client-uris-must-match": [ - "true" - ] - } - }, - { - "id": "45bdde87-a364-4d66-a12e-1a4fd42c85fb", - "name": "Full Scope Disabled", - "providerId": "scope", - "subType": "anonymous", - "subComponents": {}, - "config": {} - }, - { - "id": "7b7d3215-68d2-41db-bc0f-db0a45934a84", - "name": "Allowed Client Scopes", - "providerId": "allowed-client-templates", - "subType": "anonymous", - "subComponents": {}, - "config": { - "allow-default-scopes": [ - "true" - ] - } - }, - { - "id": "e067781a-6058-4f2b-9408-3390e9854cf8", - "name": "Consent Required", - "providerId": "consent-required", - "subType": "anonymous", - "subComponents": {}, - "config": {} - }, - { - "id": "296be954-8084-45c8-b6f3-94d53f7341f6", - "name": "Allowed Protocol Mapper Types", - "providerId": "allowed-protocol-mappers", - "subType": "anonymous", - "subComponents": {}, - "config": { - "allowed-protocol-mapper-types": [ - "saml-role-list-mapper", - "saml-user-property-mapper", - "oidc-usermodel-attribute-mapper", - "oidc-address-mapper", - "oidc-sha256-pairwise-sub-mapper", - "saml-user-attribute-mapper", - "oidc-usermodel-property-mapper", - "oidc-full-name-mapper" - ] - } - }, - { - "id": "b9a2a484-aee1-4633-aa37-a9ab2b74a239", - "name": "Allowed Client Scopes", - "providerId": "allowed-client-templates", - "subType": "authenticated", - "subComponents": {}, - "config": { - "allow-default-scopes": [ - "true" - ] - } - }, - { - "id": "016e4914-a32c-40fa-8aab-3eb25a411df5", - "name": "Max Clients Limit", - "providerId": "max-clients", - "subType": "anonymous", - "subComponents": {}, - "config": { - 
"max-clients": [ - "200" - ] - } - }, - { - "id": "a4fb2fa3-93b8-4497-8047-424f70f298c7", - "name": "Allowed Protocol Mapper Types", - "providerId": "allowed-protocol-mappers", - "subType": "authenticated", - "subComponents": {}, - "config": { - "allowed-protocol-mapper-types": [ - "oidc-sha256-pairwise-sub-mapper", - "oidc-full-name-mapper", - "saml-user-property-mapper", - "saml-role-list-mapper", - "oidc-usermodel-attribute-mapper", - "oidc-address-mapper", - "oidc-usermodel-property-mapper", - "saml-user-attribute-mapper" - ] - } - } - ], - "org.keycloak.keys.KeyProvider": [ - { - "id": "31b693fa-2b95-47a6-96a1-dfff868ca1df", - "name": "rsa-enc-generated", - "providerId": "rsa-enc-generated", - "subComponents": {}, - "config": { - "priority": [ - "100" - ], - "algorithm": [ - "RSA-OAEP" - ] - } - }, - { - "id": "f1e63d09-45a0-4382-8346-0408ee906649", - "name": "hmac-generated", - "providerId": "hmac-generated", - "subComponents": {}, - "config": { - "priority": [ - "100" - ], - "algorithm": [ - "HS256" - ] - } - }, - { - "id": "99084d92-06f5-4787-b932-a40b5377f3cb", - "name": "rsa-generated", - "providerId": "rsa-generated", - "subComponents": {}, - "config": { - "priority": [ - "100" - ] - } - }, - { - "id": "9887f1bf-b4f7-4646-9919-a9dbde13ce74", - "name": "aes-generated", - "providerId": "aes-generated", - "subComponents": {}, - "config": { - "priority": [ - "100" - ] - } - } - ] - }, - "internationalizationEnabled": false, - "supportedLocales": [], - "authenticationFlows": [ - { - "id": "a7f91199-178d-4399-8319-5063ffcc37b0", - "alias": "Account verification options", - "description": "Method with which to verity the existing account", - "providerId": "basic-flow", - "topLevel": false, - "builtIn": true, - "authenticationExecutions": [ - { - "authenticator": "idp-email-verification", - "authenticatorFlow": false, - "requirement": "ALTERNATIVE", - "priority": 10, - "userSetupAllowed": false, - "autheticatorFlow": false - }, - { - "authenticatorFlow": true, - 
"requirement": "ALTERNATIVE", - "priority": 20, - "flowAlias": "Verify Existing Account by Re-authentication", - "userSetupAllowed": false, - "autheticatorFlow": true - } - ] - }, - { - "id": "602533e3-f7a1-4e25-9a12-f3080eeccec3", - "alias": "Authentication Options", - "description": "Authentication options.", - "providerId": "basic-flow", - "topLevel": false, - "builtIn": true, - "authenticationExecutions": [ - { - "authenticator": "basic-auth", - "authenticatorFlow": false, - "requirement": "REQUIRED", - "priority": 10, - "userSetupAllowed": false, - "autheticatorFlow": false - }, - { - "authenticator": "basic-auth-otp", - "authenticatorFlow": false, - "requirement": "DISABLED", - "priority": 20, - "userSetupAllowed": false, - "autheticatorFlow": false - }, - { - "authenticator": "auth-spnego", - "authenticatorFlow": false, - "requirement": "DISABLED", - "priority": 30, - "userSetupAllowed": false, - "autheticatorFlow": false - } - ] - }, - { - "id": "ba7bcdfd-05c6-4da6-827b-24e3513bddbe", - "alias": "Browser - Conditional OTP", - "description": "Flow to determine if the OTP is required for the authentication", - "providerId": "basic-flow", - "topLevel": false, - "builtIn": true, - "authenticationExecutions": [ - { - "authenticator": "conditional-user-configured", - "authenticatorFlow": false, - "requirement": "REQUIRED", - "priority": 10, - "userSetupAllowed": false, - "autheticatorFlow": false - }, - { - "authenticator": "auth-otp-form", - "authenticatorFlow": false, - "requirement": "REQUIRED", - "priority": 20, - "userSetupAllowed": false, - "autheticatorFlow": false - } - ] - }, - { - "id": "d0f62327-ef2f-4561-8b5a-1f61faecdac0", - "alias": "Direct Grant - Conditional OTP", - "description": "Flow to determine if the OTP is required for the authentication", - "providerId": "basic-flow", - "topLevel": false, - "builtIn": true, - "authenticationExecutions": [ - { - "authenticator": "conditional-user-configured", - "authenticatorFlow": false, - "requirement": 
"REQUIRED", - "priority": 10, - "userSetupAllowed": false, - "autheticatorFlow": false - }, - { - "authenticator": "direct-grant-validate-otp", - "authenticatorFlow": false, - "requirement": "REQUIRED", - "priority": 20, - "userSetupAllowed": false, - "autheticatorFlow": false - } - ] - }, - { - "id": "f10b85d0-26ee-4648-b81b-80213b066d76", - "alias": "First broker login - Conditional OTP", - "description": "Flow to determine if the OTP is required for the authentication", - "providerId": "basic-flow", - "topLevel": false, - "builtIn": true, - "authenticationExecutions": [ - { - "authenticator": "conditional-user-configured", - "authenticatorFlow": false, - "requirement": "REQUIRED", - "priority": 10, - "userSetupAllowed": false, - "autheticatorFlow": false - }, - { - "authenticator": "auth-otp-form", - "authenticatorFlow": false, - "requirement": "REQUIRED", - "priority": 20, - "userSetupAllowed": false, - "autheticatorFlow": false - } - ] - }, - { - "id": "d6af4ac0-f6bc-4197-bf01-6e2c321ecaad", - "alias": "Handle Existing Account", - "description": "Handle what to do if there is existing account with same email/username like authenticated identity provider", - "providerId": "basic-flow", - "topLevel": false, - "builtIn": true, - "authenticationExecutions": [ - { - "authenticator": "idp-confirm-link", - "authenticatorFlow": false, - "requirement": "REQUIRED", - "priority": 10, - "userSetupAllowed": false, - "autheticatorFlow": false - }, - { - "authenticatorFlow": true, - "requirement": "REQUIRED", - "priority": 20, - "flowAlias": "Account verification options", - "userSetupAllowed": false, - "autheticatorFlow": true - } - ] - }, - { - "id": "501ab743-2e2f-427d-820f-14deed111b08", - "alias": "Reset - Conditional OTP", - "description": "Flow to determine if the OTP should be reset or not. 
Set to REQUIRED to force.", - "providerId": "basic-flow", - "topLevel": false, - "builtIn": true, - "authenticationExecutions": [ - { - "authenticator": "conditional-user-configured", - "authenticatorFlow": false, - "requirement": "REQUIRED", - "priority": 10, - "userSetupAllowed": false, - "autheticatorFlow": false - }, - { - "authenticator": "reset-otp", - "authenticatorFlow": false, - "requirement": "REQUIRED", - "priority": 20, - "userSetupAllowed": false, - "autheticatorFlow": false - } - ] - }, - { - "id": "e02c3a63-a09d-4dde-9f6c-22c95eef8534", - "alias": "User creation or linking", - "description": "Flow for the existing/non-existing user alternatives", - "providerId": "basic-flow", - "topLevel": false, - "builtIn": true, - "authenticationExecutions": [ - { - "authenticatorConfig": "create unique user config", - "authenticator": "idp-create-user-if-unique", - "authenticatorFlow": false, - "requirement": "ALTERNATIVE", - "priority": 10, - "userSetupAllowed": false, - "autheticatorFlow": false - }, - { - "authenticatorFlow": true, - "requirement": "ALTERNATIVE", - "priority": 20, - "flowAlias": "Handle Existing Account", - "userSetupAllowed": false, - "autheticatorFlow": true - } - ] - }, - { - "id": "c348906d-6266-4e68-937e-8f3d15c66524", - "alias": "Verify Existing Account by Re-authentication", - "description": "Reauthentication of existing account", - "providerId": "basic-flow", - "topLevel": false, - "builtIn": true, - "authenticationExecutions": [ - { - "authenticator": "idp-username-password-form", - "authenticatorFlow": false, - "requirement": "REQUIRED", - "priority": 10, - "userSetupAllowed": false, - "autheticatorFlow": false - }, - { - "authenticatorFlow": true, - "requirement": "CONDITIONAL", - "priority": 20, - "flowAlias": "First broker login - Conditional OTP", - "userSetupAllowed": false, - "autheticatorFlow": true - } - ] - }, - { - "id": "cf6ba166-43d5-4687-95c4-0a184ca08885", - "alias": "browser", - "description": "browser based 
authentication", - "providerId": "basic-flow", - "topLevel": true, - "builtIn": true, - "authenticationExecutions": [ - { - "authenticator": "auth-cookie", - "authenticatorFlow": false, - "requirement": "ALTERNATIVE", - "priority": 10, - "userSetupAllowed": false, - "autheticatorFlow": false - }, - { - "authenticator": "auth-spnego", - "authenticatorFlow": false, - "requirement": "DISABLED", - "priority": 20, - "userSetupAllowed": false, - "autheticatorFlow": false - }, - { - "authenticator": "identity-provider-redirector", - "authenticatorFlow": false, - "requirement": "ALTERNATIVE", - "priority": 25, - "userSetupAllowed": false, - "autheticatorFlow": false - }, - { - "authenticatorFlow": true, - "requirement": "ALTERNATIVE", - "priority": 30, - "flowAlias": "forms", - "userSetupAllowed": false, - "autheticatorFlow": true - } - ] - }, - { - "id": "87cb4f25-9275-4617-9e95-63adf1ce3ece", - "alias": "clients", - "description": "Base authentication for clients", - "providerId": "client-flow", - "topLevel": true, - "builtIn": true, - "authenticationExecutions": [ - { - "authenticator": "client-secret", - "authenticatorFlow": false, - "requirement": "ALTERNATIVE", - "priority": 10, - "userSetupAllowed": false, - "autheticatorFlow": false - }, - { - "authenticator": "client-jwt", - "authenticatorFlow": false, - "requirement": "ALTERNATIVE", - "priority": 20, - "userSetupAllowed": false, - "autheticatorFlow": false - }, - { - "authenticator": "client-secret-jwt", - "authenticatorFlow": false, - "requirement": "ALTERNATIVE", - "priority": 30, - "userSetupAllowed": false, - "autheticatorFlow": false - }, - { - "authenticator": "client-x509", - "authenticatorFlow": false, - "requirement": "ALTERNATIVE", - "priority": 40, - "userSetupAllowed": false, - "autheticatorFlow": false - } - ] - }, - { - "id": "e75b99c5-c566-4009-b0ba-c73716bed254", - "alias": "direct grant", - "description": "OpenID Connect Resource Owner Grant", - "providerId": "basic-flow", - "topLevel": true, - 
"builtIn": true, - "authenticationExecutions": [ - { - "authenticator": "direct-grant-validate-username", - "authenticatorFlow": false, - "requirement": "REQUIRED", - "priority": 10, - "userSetupAllowed": false, - "autheticatorFlow": false - }, - { - "authenticator": "direct-grant-validate-password", - "authenticatorFlow": false, - "requirement": "REQUIRED", - "priority": 20, - "userSetupAllowed": false, - "autheticatorFlow": false - }, - { - "authenticatorFlow": true, - "requirement": "CONDITIONAL", - "priority": 30, - "flowAlias": "Direct Grant - Conditional OTP", - "userSetupAllowed": false, - "autheticatorFlow": true - } - ] - }, - { - "id": "8a97380c-0f70-45cb-a7b0-780eb70453ba", - "alias": "docker auth", - "description": "Used by Docker clients to authenticate against the IDP", - "providerId": "basic-flow", - "topLevel": true, - "builtIn": true, - "authenticationExecutions": [ - { - "authenticator": "docker-http-basic-authenticator", - "authenticatorFlow": false, - "requirement": "REQUIRED", - "priority": 10, - "userSetupAllowed": false, - "autheticatorFlow": false - } - ] - }, - { - "id": "131e0aad-5422-4504-bafc-96be2fa44c34", - "alias": "first broker login", - "description": "Actions taken after first broker login with identity provider account, which is not yet linked to any Keycloak account", - "providerId": "basic-flow", - "topLevel": true, - "builtIn": true, - "authenticationExecutions": [ - { - "authenticatorConfig": "review profile config", - "authenticator": "idp-review-profile", - "authenticatorFlow": false, - "requirement": "REQUIRED", - "priority": 10, - "userSetupAllowed": false, - "autheticatorFlow": false - }, - { - "authenticatorFlow": true, - "requirement": "REQUIRED", - "priority": 20, - "flowAlias": "User creation or linking", - "userSetupAllowed": false, - "autheticatorFlow": true - } - ] - }, - { - "id": "e7d4b793-b3c2-4ec3-a2b1-04f7217e8f46", - "alias": "forms", - "description": "Username, password, otp and other auth forms.", - 
"providerId": "basic-flow", - "topLevel": false, - "builtIn": true, - "authenticationExecutions": [ - { - "authenticator": "auth-username-password-form", - "authenticatorFlow": false, - "requirement": "REQUIRED", - "priority": 10, - "userSetupAllowed": false, - "autheticatorFlow": false - }, - { - "authenticatorFlow": true, - "requirement": "CONDITIONAL", - "priority": 20, - "flowAlias": "Browser - Conditional OTP", - "userSetupAllowed": false, - "autheticatorFlow": true - } - ] - }, - { - "id": "f59a7688-61a1-4ac9-a13a-03f92e022add", - "alias": "http challenge", - "description": "An authentication flow based on challenge-response HTTP Authentication Schemes", - "providerId": "basic-flow", - "topLevel": true, - "builtIn": true, - "authenticationExecutions": [ - { - "authenticator": "no-cookie-redirect", - "authenticatorFlow": false, - "requirement": "REQUIRED", - "priority": 10, - "userSetupAllowed": false, - "autheticatorFlow": false - }, - { - "authenticatorFlow": true, - "requirement": "REQUIRED", - "priority": 20, - "flowAlias": "Authentication Options", - "userSetupAllowed": false, - "autheticatorFlow": true - } - ] - }, - { - "id": "80a7b0f5-abb3-4780-be58-4ed1dc3e50fa", - "alias": "registration", - "description": "registration flow", - "providerId": "basic-flow", - "topLevel": true, - "builtIn": true, - "authenticationExecutions": [ - { - "authenticator": "registration-page-form", - "authenticatorFlow": true, - "requirement": "REQUIRED", - "priority": 10, - "flowAlias": "registration form", - "userSetupAllowed": false, - "autheticatorFlow": true - } - ] - }, - { - "id": "f18231cf-b803-493b-9dd6-ee8fa602c861", - "alias": "registration form", - "description": "registration form", - "providerId": "form-flow", - "topLevel": false, - "builtIn": true, - "authenticationExecutions": [ - { - "authenticator": "registration-user-creation", - "authenticatorFlow": false, - "requirement": "REQUIRED", - "priority": 20, - "userSetupAllowed": false, - "autheticatorFlow": 
false - }, - { - "authenticator": "registration-profile-action", - "authenticatorFlow": false, - "requirement": "REQUIRED", - "priority": 40, - "userSetupAllowed": false, - "autheticatorFlow": false - }, - { - "authenticator": "registration-password-action", - "authenticatorFlow": false, - "requirement": "REQUIRED", - "priority": 50, - "userSetupAllowed": false, - "autheticatorFlow": false - }, - { - "authenticator": "registration-recaptcha-action", - "authenticatorFlow": false, - "requirement": "DISABLED", - "priority": 60, - "userSetupAllowed": false, - "autheticatorFlow": false - } - ] - }, - { - "id": "34ccfce6-1488-4db3-b90e-d98e8d8b2ae6", - "alias": "reset credentials", - "description": "Reset credentials for a user if they forgot their password or something", - "providerId": "basic-flow", - "topLevel": true, - "builtIn": true, - "authenticationExecutions": [ - { - "authenticator": "reset-credentials-choose-user", - "authenticatorFlow": false, - "requirement": "REQUIRED", - "priority": 10, - "userSetupAllowed": false, - "autheticatorFlow": false - }, - { - "authenticator": "reset-credential-email", - "authenticatorFlow": false, - "requirement": "REQUIRED", - "priority": 20, - "userSetupAllowed": false, - "autheticatorFlow": false - }, - { - "authenticator": "reset-password", - "authenticatorFlow": false, - "requirement": "REQUIRED", - "priority": 30, - "userSetupAllowed": false, - "autheticatorFlow": false - }, - { - "authenticatorFlow": true, - "requirement": "CONDITIONAL", - "priority": 40, - "flowAlias": "Reset - Conditional OTP", - "userSetupAllowed": false, - "autheticatorFlow": true - } - ] - }, - { - "id": "4468100c-fa83-4c16-8970-d53cb592f93a", - "alias": "saml ecp", - "description": "SAML ECP Profile Authentication Flow", - "providerId": "basic-flow", - "topLevel": true, - "builtIn": true, - "authenticationExecutions": [ - { - "authenticator": "http-basic-authenticator", - "authenticatorFlow": false, - "requirement": "REQUIRED", - "priority": 10, - 
"userSetupAllowed": false, - "autheticatorFlow": false - } - ] - } - ], - "authenticatorConfig": [ - { - "id": "c3bb087e-7fe9-4f13-b1bd-c2d7d1320054", - "alias": "create unique user config", - "config": { - "require.password.update.after.registration": "false" - } - }, - { - "id": "09820d9d-3c12-45f3-bc62-97b53f8a7efe", - "alias": "review profile config", - "config": { - "update.profile.on.first.login": "missing" - } - } - ], - "requiredActions": [ - { - "alias": "CONFIGURE_TOTP", - "name": "Configure OTP", - "providerId": "CONFIGURE_TOTP", - "enabled": true, - "defaultAction": false, - "priority": 10, - "config": {} - }, - { - "alias": "terms_and_conditions", - "name": "Terms and Conditions", - "providerId": "terms_and_conditions", - "enabled": false, - "defaultAction": false, - "priority": 20, - "config": {} - }, - { - "alias": "UPDATE_PASSWORD", - "name": "Update Password", - "providerId": "UPDATE_PASSWORD", - "enabled": true, - "defaultAction": false, - "priority": 30, - "config": {} - }, - { - "alias": "UPDATE_PROFILE", - "name": "Update Profile", - "providerId": "UPDATE_PROFILE", - "enabled": true, - "defaultAction": false, - "priority": 40, - "config": {} - }, - { - "alias": "VERIFY_EMAIL", - "name": "Verify Email", - "providerId": "VERIFY_EMAIL", - "enabled": true, - "defaultAction": false, - "priority": 50, - "config": {} - }, - { - "alias": "delete_account", - "name": "Delete Account", - "providerId": "delete_account", - "enabled": false, - "defaultAction": false, - "priority": 60, - "config": {} - }, - { - "alias": "update_user_locale", - "name": "Update User Locale", - "providerId": "update_user_locale", - "enabled": true, - "defaultAction": false, - "priority": 1000, - "config": {} - } - ], - "browserFlow": "browser", - "registrationFlow": "registration", - "directGrantFlow": "direct grant", - "resetCredentialsFlow": "reset credentials", - "clientAuthenticationFlow": "clients", - "dockerAuthenticationFlow": "docker auth", - "attributes": { - 
"cibaBackchannelTokenDeliveryMode": "poll", - "cibaExpiresIn": "120", - "cibaAuthRequestedUserHint": "login_hint", - "oauth2DeviceCodeLifespan": "600", - "clientOfflineSessionMaxLifespan": "0", - "oauth2DevicePollingInterval": "5", - "clientSessionIdleTimeout": "0", - "parRequestUriLifespan": "60", - "clientSessionMaxLifespan": "0", - "clientOfflineSessionIdleTimeout": "0", - "cibaInterval": "5" - }, - "keycloakVersion": "16.1.1", - "userManagedAccessAllowed": false, - "clientProfiles": { - "profiles": [] - }, - "clientPolicies": { - "policies": [] - } -} diff --git a/conf/keycloak/run-keycloak.sh b/conf/keycloak/run-keycloak.sh index effb37f91b8..ddc5108bee4 100755 --- a/conf/keycloak/run-keycloak.sh +++ b/conf/keycloak/run-keycloak.sh @@ -1,6 +1,6 @@ #!/usr/bin/env bash -DOCKER_IMAGE="jboss/keycloak:16.1.1" +DOCKER_IMAGE="quay.io/keycloak/keycloak:21.0" KEYCLOAK_USER="kcadmin" KEYCLOAK_PASSWORD="kcpassword" KEYCLOAK_PORT=8090 @@ -11,7 +11,7 @@ if [ ! "$(docker ps -q -f name=^/keycloak$)" ]; then docker start keycloak echo "INFO - Keycloak container restarted" else - docker run -d --name keycloak -p $KEYCLOAK_PORT:8080 -e KEYCLOAK_USER=$KEYCLOAK_USER -e KEYCLOAK_PASSWORD=$KEYCLOAK_PASSWORD -e KEYCLOAK_IMPORT=/tmp/oidc-realm.json -v "$(pwd)"/oidc-realm.json:/tmp/oidc-realm.json $DOCKER_IMAGE + docker run -d --name keycloak -p $KEYCLOAK_PORT:8080 -e KEYCLOAK_USER=$KEYCLOAK_USER -e KEYCLOAK_PASSWORD=$KEYCLOAK_PASSWORD -e KEYCLOAK_IMPORT=/tmp/test-realm.json -v "$(pwd)"/test-realm.json:/tmp/test-realm.json $DOCKER_IMAGE echo "INFO - Keycloak container created and running" fi else diff --git a/conf/keycloak/test-realm.json b/conf/keycloak/test-realm.json new file mode 100644 index 00000000000..efe71cc5d29 --- /dev/null +++ b/conf/keycloak/test-realm.json @@ -0,0 +1,1939 @@ +{ + "id" : "80a7e04b-a2b5-4891-a2d1-5ad4e915f983", + "realm" : "test", + "displayName" : "", + "displayNameHtml" : "", + "notBefore" : 0, + "defaultSignatureAlgorithm" : "RS256", + 
"revokeRefreshToken" : false, + "refreshTokenMaxReuse" : 0, + "accessTokenLifespan" : 300, + "accessTokenLifespanForImplicitFlow" : 900, + "ssoSessionIdleTimeout" : 1800, + "ssoSessionMaxLifespan" : 36000, + "ssoSessionIdleTimeoutRememberMe" : 0, + "ssoSessionMaxLifespanRememberMe" : 0, + "offlineSessionIdleTimeout" : 2592000, + "offlineSessionMaxLifespanEnabled" : false, + "offlineSessionMaxLifespan" : 5184000, + "clientSessionIdleTimeout" : 0, + "clientSessionMaxLifespan" : 0, + "clientOfflineSessionIdleTimeout" : 0, + "clientOfflineSessionMaxLifespan" : 0, + "accessCodeLifespan" : 60, + "accessCodeLifespanUserAction" : 300, + "accessCodeLifespanLogin" : 1800, + "actionTokenGeneratedByAdminLifespan" : 43200, + "actionTokenGeneratedByUserLifespan" : 300, + "oauth2DeviceCodeLifespan" : 600, + "oauth2DevicePollingInterval" : 5, + "enabled" : true, + "sslRequired" : "none", + "registrationAllowed" : false, + "registrationEmailAsUsername" : false, + "rememberMe" : false, + "verifyEmail" : false, + "loginWithEmailAllowed" : true, + "duplicateEmailsAllowed" : false, + "resetPasswordAllowed" : false, + "editUsernameAllowed" : false, + "bruteForceProtected" : false, + "permanentLockout" : false, + "maxFailureWaitSeconds" : 900, + "minimumQuickLoginWaitSeconds" : 60, + "waitIncrementSeconds" : 60, + "quickLoginCheckMilliSeconds" : 1000, + "maxDeltaTimeSeconds" : 43200, + "failureFactor" : 30, + "roles" : { + "realm" : [ { + "id" : "075daee1-5ab2-44b5-adbf-fa49a3da8305", + "name" : "uma_authorization", + "description" : "${role_uma_authorization}", + "composite" : false, + "clientRole" : false, + "containerId" : "80a7e04b-a2b5-4891-a2d1-5ad4e915f983", + "attributes" : { } + }, { + "id" : "b4ff9091-ddf9-4536-b175-8cfa3e331d71", + "name" : "default-roles-test", + "description" : "${role_default-roles}", + "composite" : true, + "composites" : { + "realm" : [ "offline_access", "uma_authorization" ], + "client" : { + "account" : [ "view-profile", "manage-account" ] + } + }, + 
"clientRole" : false, + "containerId" : "80a7e04b-a2b5-4891-a2d1-5ad4e915f983", + "attributes" : { } + }, { + "id" : "e6d31555-6be6-4dee-bc6a-40a53108e4c2", + "name" : "offline_access", + "description" : "${role_offline-access}", + "composite" : false, + "clientRole" : false, + "containerId" : "80a7e04b-a2b5-4891-a2d1-5ad4e915f983", + "attributes" : { } + } ], + "client" : { + "realm-management" : [ { + "id" : "1955bd12-5f86-4a74-b130-d68a8ef6f0ee", + "name" : "impersonation", + "description" : "${role_impersonation}", + "composite" : false, + "clientRole" : true, + "containerId" : "dada0ae8-ee9f-415a-9685-42da7c563660", + "attributes" : { } + }, { + "id" : "1109c350-9ab1-426c-9876-ef67d4310f35", + "name" : "view-authorization", + "description" : "${role_view-authorization}", + "composite" : false, + "clientRole" : true, + "containerId" : "dada0ae8-ee9f-415a-9685-42da7c563660", + "attributes" : { } + }, { + "id" : "980c3fd3-1ae3-4b8f-9a00-d764c939035f", + "name" : "query-users", + "description" : "${role_query-users}", + "composite" : false, + "clientRole" : true, + "containerId" : "dada0ae8-ee9f-415a-9685-42da7c563660", + "attributes" : { } + }, { + "id" : "5363e601-0f9d-4633-a8c8-28cb0f859b7b", + "name" : "query-groups", + "description" : "${role_query-groups}", + "composite" : false, + "clientRole" : true, + "containerId" : "dada0ae8-ee9f-415a-9685-42da7c563660", + "attributes" : { } + }, { + "id" : "59aa7992-ad78-48db-868a-25d6e1d7db50", + "name" : "realm-admin", + "description" : "${role_realm-admin}", + "composite" : true, + "composites" : { + "client" : { + "realm-management" : [ "impersonation", "view-authorization", "query-users", "query-groups", "manage-clients", "manage-realm", "view-identity-providers", "query-realms", "manage-authorization", "manage-identity-providers", "manage-users", "view-users", "view-realm", "create-client", "view-clients", "manage-events", "query-clients", "view-events" ] + } + }, + "clientRole" : true, + "containerId" : 
"dada0ae8-ee9f-415a-9685-42da7c563660", + "attributes" : { } + }, { + "id" : "112f53c2-897d-4c01-81db-b8dc10c5b995", + "name" : "manage-clients", + "description" : "${role_manage-clients}", + "composite" : false, + "clientRole" : true, + "containerId" : "dada0ae8-ee9f-415a-9685-42da7c563660", + "attributes" : { } + }, { + "id" : "c7f57bbd-ef32-4a64-9888-7b8abd90777a", + "name" : "manage-realm", + "description" : "${role_manage-realm}", + "composite" : false, + "clientRole" : true, + "containerId" : "dada0ae8-ee9f-415a-9685-42da7c563660", + "attributes" : { } + }, { + "id" : "8885dac8-0af3-45af-94ce-eff5e801bb80", + "name" : "view-identity-providers", + "description" : "${role_view-identity-providers}", + "composite" : false, + "clientRole" : true, + "containerId" : "dada0ae8-ee9f-415a-9685-42da7c563660", + "attributes" : { } + }, { + "id" : "2673346c-b0ef-4e01-8a90-be03866093af", + "name" : "manage-authorization", + "description" : "${role_manage-authorization}", + "composite" : false, + "clientRole" : true, + "containerId" : "dada0ae8-ee9f-415a-9685-42da7c563660", + "attributes" : { } + }, { + "id" : "b7182885-9e57-445f-8dae-17c16eb31b5d", + "name" : "manage-identity-providers", + "description" : "${role_manage-identity-providers}", + "composite" : false, + "clientRole" : true, + "containerId" : "dada0ae8-ee9f-415a-9685-42da7c563660", + "attributes" : { } + }, { + "id" : "ba7bfe0c-cb07-4a47-b92c-b8132b57e181", + "name" : "manage-users", + "description" : "${role_manage-users}", + "composite" : false, + "clientRole" : true, + "containerId" : "dada0ae8-ee9f-415a-9685-42da7c563660", + "attributes" : { } + }, { + "id" : "13a8f0fc-647d-4bfe-b525-73956898e550", + "name" : "query-realms", + "description" : "${role_query-realms}", + "composite" : false, + "clientRole" : true, + "containerId" : "dada0ae8-ee9f-415a-9685-42da7c563660", + "attributes" : { } + }, { + "id" : "ef4c57dc-78c2-4f9a-8d2b-0e97d46fc842", + "name" : "view-realm", + "description" : "${role_view-realm}", 
+ "composite" : false, + "clientRole" : true, + "containerId" : "dada0ae8-ee9f-415a-9685-42da7c563660", + "attributes" : { } + }, { + "id" : "2875da34-006c-4b7f-bfc8-9ae8e46af3a2", + "name" : "view-users", + "description" : "${role_view-users}", + "composite" : true, + "composites" : { + "client" : { + "realm-management" : [ "query-users", "query-groups" ] + } + }, + "clientRole" : true, + "containerId" : "dada0ae8-ee9f-415a-9685-42da7c563660", + "attributes" : { } + }, { + "id" : "c8c8f7dc-876b-4263-806f-3329f7cd5fd3", + "name" : "create-client", + "description" : "${role_create-client}", + "composite" : false, + "clientRole" : true, + "containerId" : "dada0ae8-ee9f-415a-9685-42da7c563660", + "attributes" : { } + }, { + "id" : "21b84f90-5a9a-4845-a7ba-bbd98ac0fcc4", + "name" : "view-clients", + "description" : "${role_view-clients}", + "composite" : true, + "composites" : { + "client" : { + "realm-management" : [ "query-clients" ] + } + }, + "clientRole" : true, + "containerId" : "dada0ae8-ee9f-415a-9685-42da7c563660", + "attributes" : { } + }, { + "id" : "6fd64c94-d663-4501-ad77-0dcf8887d434", + "name" : "manage-events", + "description" : "${role_manage-events}", + "composite" : false, + "clientRole" : true, + "containerId" : "dada0ae8-ee9f-415a-9685-42da7c563660", + "attributes" : { } + }, { + "id" : "b321927a-023c-4d2a-99ad-24baf7ff6d83", + "name" : "query-clients", + "description" : "${role_query-clients}", + "composite" : false, + "clientRole" : true, + "containerId" : "dada0ae8-ee9f-415a-9685-42da7c563660", + "attributes" : { } + }, { + "id" : "2fc21160-78de-457b-8594-e5c76cde1d5e", + "name" : "view-events", + "description" : "${role_view-events}", + "composite" : false, + "clientRole" : true, + "containerId" : "dada0ae8-ee9f-415a-9685-42da7c563660", + "attributes" : { } + } ], + "test" : [ ], + "security-admin-console" : [ ], + "admin-cli" : [ ], + "account-console" : [ ], + "broker" : [ { + "id" : "07ee59b5-dca6-48fb-83d4-2994ef02850e", + "name" : 
"read-token", + "description" : "${role_read-token}", + "composite" : false, + "clientRole" : true, + "containerId" : "b57d62bb-77ff-42bd-b8ff-381c7288f327", + "attributes" : { } + } ], + "account" : [ { + "id" : "17d2f811-7bdf-4c73-83b4-1037001797b8", + "name" : "view-applications", + "description" : "${role_view-applications}", + "composite" : false, + "clientRole" : true, + "containerId" : "77f8127a-261e-4cd8-a77d-b74a389f7fd4", + "attributes" : { } + }, { + "id" : "d1ff44f9-419e-42fd-98e8-1add1169a972", + "name" : "delete-account", + "description" : "${role_delete-account}", + "composite" : false, + "clientRole" : true, + "containerId" : "77f8127a-261e-4cd8-a77d-b74a389f7fd4", + "attributes" : { } + }, { + "id" : "14c23a18-ae2d-43c9-b0c0-aaf6e0c7f5b0", + "name" : "manage-account-links", + "description" : "${role_manage-account-links}", + "composite" : false, + "clientRole" : true, + "containerId" : "77f8127a-261e-4cd8-a77d-b74a389f7fd4", + "attributes" : { } + }, { + "id" : "6fbe58af-d2fe-4d66-95fe-a2e8a818cb55", + "name" : "view-profile", + "description" : "${role_view-profile}", + "composite" : false, + "clientRole" : true, + "containerId" : "77f8127a-261e-4cd8-a77d-b74a389f7fd4", + "attributes" : { } + }, { + "id" : "bdfd02bc-6f6a-47d2-82bc-0ca52d78ff48", + "name" : "manage-consent", + "description" : "${role_manage-consent}", + "composite" : true, + "composites" : { + "client" : { + "account" : [ "view-consent" ] + } + }, + "clientRole" : true, + "containerId" : "77f8127a-261e-4cd8-a77d-b74a389f7fd4", + "attributes" : { } + }, { + "id" : "782f3b0c-a17b-4a87-988b-1a711401f3b0", + "name" : "manage-account", + "description" : "${role_manage-account}", + "composite" : true, + "composites" : { + "client" : { + "account" : [ "manage-account-links" ] + } + }, + "clientRole" : true, + "containerId" : "77f8127a-261e-4cd8-a77d-b74a389f7fd4", + "attributes" : { } + }, { + "id" : "8a3bfe15-66d9-4f3d-83ac-801d682d42b0", + "name" : "view-consent", + "description" : 
"${role_view-consent}", + "composite" : false, + "clientRole" : true, + "containerId" : "77f8127a-261e-4cd8-a77d-b74a389f7fd4", + "attributes" : { } + } ] + } + }, + "groups" : [ { + "id" : "d46f94c2-3b47-4288-b937-9cf918e54f0a", + "name" : "admins", + "path" : "/admins", + "attributes" : { }, + "realmRoles" : [ ], + "clientRoles" : { }, + "subGroups" : [ ] + }, { + "id" : "e992ce15-baac-48a0-8834-06f6fcf6c05b", + "name" : "curators", + "path" : "/curators", + "attributes" : { }, + "realmRoles" : [ ], + "clientRoles" : { }, + "subGroups" : [ ] + }, { + "id" : "531cf81d-a700-4336-808f-37a49709b48c", + "name" : "members", + "path" : "/members", + "attributes" : { }, + "realmRoles" : [ ], + "clientRoles" : { }, + "subGroups" : [ ] + } ], + "defaultRole" : { + "id" : "b4ff9091-ddf9-4536-b175-8cfa3e331d71", + "name" : "default-roles-test", + "description" : "${role_default-roles}", + "composite" : true, + "clientRole" : false, + "containerId" : "80a7e04b-a2b5-4891-a2d1-5ad4e915f983" + }, + "requiredCredentials" : [ "password" ], + "otpPolicyType" : "totp", + "otpPolicyAlgorithm" : "HmacSHA1", + "otpPolicyInitialCounter" : 0, + "otpPolicyDigits" : 6, + "otpPolicyLookAheadWindow" : 1, + "otpPolicyPeriod" : 30, + "otpSupportedApplications" : [ "FreeOTP", "Google Authenticator" ], + "webAuthnPolicyRpEntityName" : "keycloak", + "webAuthnPolicySignatureAlgorithms" : [ "ES256" ], + "webAuthnPolicyRpId" : "", + "webAuthnPolicyAttestationConveyancePreference" : "not specified", + "webAuthnPolicyAuthenticatorAttachment" : "not specified", + "webAuthnPolicyRequireResidentKey" : "not specified", + "webAuthnPolicyUserVerificationRequirement" : "not specified", + "webAuthnPolicyCreateTimeout" : 0, + "webAuthnPolicyAvoidSameAuthenticatorRegister" : false, + "webAuthnPolicyAcceptableAaguids" : [ ], + "webAuthnPolicyPasswordlessRpEntityName" : "keycloak", + "webAuthnPolicyPasswordlessSignatureAlgorithms" : [ "ES256" ], + "webAuthnPolicyPasswordlessRpId" : "", + 
"webAuthnPolicyPasswordlessAttestationConveyancePreference" : "not specified", + "webAuthnPolicyPasswordlessAuthenticatorAttachment" : "not specified", + "webAuthnPolicyPasswordlessRequireResidentKey" : "not specified", + "webAuthnPolicyPasswordlessUserVerificationRequirement" : "not specified", + "webAuthnPolicyPasswordlessCreateTimeout" : 0, + "webAuthnPolicyPasswordlessAvoidSameAuthenticatorRegister" : false, + "webAuthnPolicyPasswordlessAcceptableAaguids" : [ ], + "users" : [ { + "id" : "52cddd46-251c-4534-acc8-0580eeafb577", + "createdTimestamp" : 1684736014759, + "username" : "admin", + "enabled" : true, + "totp" : false, + "emailVerified" : true, + "firstName" : "Dataverse", + "lastName" : "Admin", + "email" : "dataverse-admin@mailinator.com", + "credentials" : [ { + "id" : "28f1ece7-26fb-40f1-9174-5ffce7b85c0a", + "type" : "password", + "userLabel" : "Set to \"admin\"", + "createdDate" : 1684736057302, + "secretData" : "{\"value\":\"ONI7fl6BmooVTUgwN1W3m7hsRjMAYEr2l+Fp5+7IOYw1iIntwvZ3U3W0ZBcCFJ7uhcKqF101+rueM3dZfoshPQ==\",\"salt\":\"Hj7co7zYVei7xwx8EaYP3A==\",\"additionalParameters\":{}}", + "credentialData" : "{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\",\"additionalParameters\":{}}" + } ], + "disableableCredentialTypes" : [ ], + "requiredActions" : [ ], + "realmRoles" : [ "default-roles-test" ], + "notBefore" : 0, + "groups" : [ "/admins" ] + }, { + "id" : "a3d8e76d-7e7b-42dc-bbd7-4258818a8a1b", + "createdTimestamp" : 1684755806552, + "username" : "affiliate", + "enabled" : true, + "totp" : false, + "emailVerified" : true, + "firstName" : "Dataverse", + "lastName" : "Affiliate", + "email" : "dataverse-affiliate@mailinator.com", + "credentials" : [ { + "id" : "31c8eb1e-b2a8-4f86-833b-7c0536cd61a1", + "type" : "password", + "userLabel" : "My password", + "createdDate" : 1684755821743, + "secretData" : 
"{\"value\":\"T+RQ4nvmjknj7ds8NU7782j6PJ++uCu98zNoDQjIe9IKXah+13q4EcXO9IHmi2BJ7lgT0OIzwIoac4JEQLxhjQ==\",\"salt\":\"fnRmE9WmjAp4tlvGh/bxxQ==\",\"additionalParameters\":{}}", + "credentialData" : "{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\",\"additionalParameters\":{}}" + } ], + "disableableCredentialTypes" : [ ], + "requiredActions" : [ ], + "realmRoles" : [ "default-roles-test" ], + "notBefore" : 0, + "groups" : [ ] + }, { + "id" : "e5531496-cfb8-498c-a902-50c98d649e79", + "createdTimestamp" : 1684755721064, + "username" : "curator", + "enabled" : true, + "totp" : false, + "emailVerified" : true, + "firstName" : "Dataverse", + "lastName" : "Curator", + "email" : "dataverse-curator@mailinator.com", + "credentials" : [ { + "id" : "664546b4-b936-45cf-a4cf-5e98b743fc7f", + "type" : "password", + "userLabel" : "My password", + "createdDate" : 1684755740776, + "secretData" : "{\"value\":\"AvVqybCNtCBVAdLEeJKresy9tc3c4BBUQvu5uHVQw4IjVagN6FpKGlDEKOrxhzdSM8skEvthOEqJkloPo1w+NQ==\",\"salt\":\"2em2DDRRlNEYsNR3xDqehw==\",\"additionalParameters\":{}}", + "credentialData" : "{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\",\"additionalParameters\":{}}" + } ], + "disableableCredentialTypes" : [ ], + "requiredActions" : [ ], + "realmRoles" : [ "default-roles-test" ], + "notBefore" : 0, + "groups" : [ "/curators" ] + }, { + "id" : "c0082e7e-a3e9-45e6-95e9-811a34adce9d", + "createdTimestamp" : 1684755585802, + "username" : "user", + "enabled" : true, + "totp" : false, + "emailVerified" : true, + "firstName" : "Dataverse", + "lastName" : "User", + "email" : "dataverse-user@mailinator.com", + "credentials" : [ { + "id" : "00d6d67f-2e30-4da6-a567-bec38a1886a0", + "type" : "password", + "userLabel" : "My password", + "createdDate" : 1684755599597, + "secretData" : "{\"value\":\"z991rnjznAgosi5nX962HjM8/gN5GLJTdrlvi6G9cj8470X2/oZUb4Lka6s8xImgtEloCgWiKqH0EH9G4Y3a5A==\",\"salt\":\"/Uz7w+2IqDo+fQUGqxjVHw==\",\"additionalParameters\":{}}", + "credentialData" : 
"{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\",\"additionalParameters\":{}}" + } ], + "disableableCredentialTypes" : [ ], + "requiredActions" : [ ], + "realmRoles" : [ "default-roles-test" ], + "notBefore" : 0, + "groups" : [ "/members" ] + } ], + "scopeMappings" : [ { + "clientScope" : "offline_access", + "roles" : [ "offline_access" ] + } ], + "clientScopeMappings" : { + "account" : [ { + "client" : "account-console", + "roles" : [ "manage-account" ] + } ] + }, + "clients" : [ { + "id" : "77f8127a-261e-4cd8-a77d-b74a389f7fd4", + "clientId" : "account", + "name" : "${client_account}", + "rootUrl" : "${authBaseUrl}", + "baseUrl" : "/realms/test/account/", + "surrogateAuthRequired" : false, + "enabled" : true, + "alwaysDisplayInConsole" : false, + "clientAuthenticatorType" : "client-secret", + "redirectUris" : [ "/realms/test/account/*" ], + "webOrigins" : [ ], + "notBefore" : 0, + "bearerOnly" : false, + "consentRequired" : false, + "standardFlowEnabled" : true, + "implicitFlowEnabled" : false, + "directAccessGrantsEnabled" : false, + "serviceAccountsEnabled" : false, + "publicClient" : true, + "frontchannelLogout" : false, + "protocol" : "openid-connect", + "attributes" : { + "post.logout.redirect.uris" : "+" + }, + "authenticationFlowBindingOverrides" : { }, + "fullScopeAllowed" : false, + "nodeReRegistrationTimeout" : 0, + "defaultClientScopes" : [ "web-origins", "acr", "roles", "profile", "email" ], + "optionalClientScopes" : [ "address", "phone", "offline_access", "microprofile-jwt" ] + }, { + "id" : "5d99f721-027c-478d-867d-61114e0a8192", + "clientId" : "account-console", + "name" : "${client_account-console}", + "rootUrl" : "${authBaseUrl}", + "baseUrl" : "/realms/test/account/", + "surrogateAuthRequired" : false, + "enabled" : true, + "alwaysDisplayInConsole" : false, + "clientAuthenticatorType" : "client-secret", + "redirectUris" : [ "/realms/test/account/*" ], + "webOrigins" : [ ], + "notBefore" : 0, + "bearerOnly" : false, + "consentRequired" 
: false, + "standardFlowEnabled" : true, + "implicitFlowEnabled" : false, + "directAccessGrantsEnabled" : false, + "serviceAccountsEnabled" : false, + "publicClient" : true, + "frontchannelLogout" : false, + "protocol" : "openid-connect", + "attributes" : { + "post.logout.redirect.uris" : "+", + "pkce.code.challenge.method" : "S256" + }, + "authenticationFlowBindingOverrides" : { }, + "fullScopeAllowed" : false, + "nodeReRegistrationTimeout" : 0, + "protocolMappers" : [ { + "id" : "e181a0ce-9a04-4468-a38a-aaef9f78f989", + "name" : "audience resolve", + "protocol" : "openid-connect", + "protocolMapper" : "oidc-audience-resolve-mapper", + "consentRequired" : false, + "config" : { } + } ], + "defaultClientScopes" : [ "web-origins", "acr", "roles", "profile", "email" ], + "optionalClientScopes" : [ "address", "phone", "offline_access", "microprofile-jwt" ] + }, { + "id" : "5eccc178-121e-4d0f-bcb2-04ae3c2e52ed", + "clientId" : "admin-cli", + "name" : "${client_admin-cli}", + "surrogateAuthRequired" : false, + "enabled" : true, + "alwaysDisplayInConsole" : false, + "clientAuthenticatorType" : "client-secret", + "redirectUris" : [ ], + "webOrigins" : [ ], + "notBefore" : 0, + "bearerOnly" : false, + "consentRequired" : false, + "standardFlowEnabled" : false, + "implicitFlowEnabled" : false, + "directAccessGrantsEnabled" : true, + "serviceAccountsEnabled" : false, + "publicClient" : true, + "frontchannelLogout" : false, + "protocol" : "openid-connect", + "attributes" : { }, + "authenticationFlowBindingOverrides" : { }, + "fullScopeAllowed" : false, + "nodeReRegistrationTimeout" : 0, + "defaultClientScopes" : [ "web-origins", "acr", "roles", "profile", "email" ], + "optionalClientScopes" : [ "address", "phone", "offline_access", "microprofile-jwt" ] + }, { + "id" : "b57d62bb-77ff-42bd-b8ff-381c7288f327", + "clientId" : "broker", + "name" : "${client_broker}", + "surrogateAuthRequired" : false, + "enabled" : true, + "alwaysDisplayInConsole" : false, + 
"clientAuthenticatorType" : "client-secret", + "redirectUris" : [ ], + "webOrigins" : [ ], + "notBefore" : 0, + "bearerOnly" : true, + "consentRequired" : false, + "standardFlowEnabled" : true, + "implicitFlowEnabled" : false, + "directAccessGrantsEnabled" : false, + "serviceAccountsEnabled" : false, + "publicClient" : false, + "frontchannelLogout" : false, + "protocol" : "openid-connect", + "attributes" : { }, + "authenticationFlowBindingOverrides" : { }, + "fullScopeAllowed" : false, + "nodeReRegistrationTimeout" : 0, + "defaultClientScopes" : [ "web-origins", "acr", "roles", "profile", "email" ], + "optionalClientScopes" : [ "address", "phone", "offline_access", "microprofile-jwt" ] + }, { + "id" : "dada0ae8-ee9f-415a-9685-42da7c563660", + "clientId" : "realm-management", + "name" : "${client_realm-management}", + "surrogateAuthRequired" : false, + "enabled" : true, + "alwaysDisplayInConsole" : false, + "clientAuthenticatorType" : "client-secret", + "redirectUris" : [ ], + "webOrigins" : [ ], + "notBefore" : 0, + "bearerOnly" : true, + "consentRequired" : false, + "standardFlowEnabled" : true, + "implicitFlowEnabled" : false, + "directAccessGrantsEnabled" : false, + "serviceAccountsEnabled" : false, + "publicClient" : false, + "frontchannelLogout" : false, + "protocol" : "openid-connect", + "attributes" : { }, + "authenticationFlowBindingOverrides" : { }, + "fullScopeAllowed" : false, + "nodeReRegistrationTimeout" : 0, + "defaultClientScopes" : [ "web-origins", "acr", "roles", "profile", "email" ], + "optionalClientScopes" : [ "address", "phone", "offline_access", "microprofile-jwt" ] + }, { + "id" : "bf7cf550-3875-4f97-9878-b2419a854058", + "clientId" : "security-admin-console", + "name" : "${client_security-admin-console}", + "rootUrl" : "${authAdminUrl}", + "baseUrl" : "/admin/test/console/", + "surrogateAuthRequired" : false, + "enabled" : true, + "alwaysDisplayInConsole" : false, + "clientAuthenticatorType" : "client-secret", + "redirectUris" : [ 
"/admin/test/console/*" ], + "webOrigins" : [ "+" ], + "notBefore" : 0, + "bearerOnly" : false, + "consentRequired" : false, + "standardFlowEnabled" : true, + "implicitFlowEnabled" : false, + "directAccessGrantsEnabled" : false, + "serviceAccountsEnabled" : false, + "publicClient" : true, + "frontchannelLogout" : false, + "protocol" : "openid-connect", + "attributes" : { + "post.logout.redirect.uris" : "+", + "pkce.code.challenge.method" : "S256" + }, + "authenticationFlowBindingOverrides" : { }, + "fullScopeAllowed" : false, + "nodeReRegistrationTimeout" : 0, + "protocolMappers" : [ { + "id" : "ff845e16-e200-4894-ab51-37d8b9f2a445", + "name" : "locale", + "protocol" : "openid-connect", + "protocolMapper" : "oidc-usermodel-attribute-mapper", + "consentRequired" : false, + "config" : { + "userinfo.token.claim" : "true", + "user.attribute" : "locale", + "id.token.claim" : "true", + "access.token.claim" : "true", + "claim.name" : "locale", + "jsonType.label" : "String" + } + } ], + "defaultClientScopes" : [ "web-origins", "acr", "roles", "profile", "email" ], + "optionalClientScopes" : [ "address", "phone", "offline_access", "microprofile-jwt" ] + }, { + "id" : "9c27faa8-4b8d-4ad9-9cd1-880032ef06aa", + "clientId" : "test", + "name" : "A Test Client", + "description" : "Use for hacking and testing away a confidential client", + "rootUrl" : "", + "adminUrl" : "", + "baseUrl" : "", + "surrogateAuthRequired" : false, + "enabled" : true, + "alwaysDisplayInConsole" : false, + "clientAuthenticatorType" : "client-secret", + "secret" : "94XHrfNRwXsjqTqApRrwWmhDLDHpIYV8", + "redirectUris" : [ "*" ], + "webOrigins" : [ ], + "notBefore" : 0, + "bearerOnly" : false, + "consentRequired" : false, + "standardFlowEnabled" : true, + "implicitFlowEnabled" : false, + "directAccessGrantsEnabled" : true, + "serviceAccountsEnabled" : false, + "publicClient" : false, + "frontchannelLogout" : true, + "protocol" : "openid-connect", + "attributes" : { + "oidc.ciba.grant.enabled" : "false", + 
"client.secret.creation.time" : "1684735831", + "backchannel.logout.session.required" : "true", + "display.on.consent.screen" : "false", + "oauth2.device.authorization.grant.enabled" : "false", + "backchannel.logout.revoke.offline.tokens" : "false" + }, + "authenticationFlowBindingOverrides" : { }, + "fullScopeAllowed" : true, + "nodeReRegistrationTimeout" : -1, + "defaultClientScopes" : [ "web-origins", "acr", "roles", "profile", "email" ], + "optionalClientScopes" : [ "address", "phone", "offline_access", "microprofile-jwt" ] + } ], + "clientScopes" : [ { + "id" : "72f29e57-92fa-437b-828c-2b9d6fe56192", + "name" : "address", + "description" : "OpenID Connect built-in scope: address", + "protocol" : "openid-connect", + "attributes" : { + "include.in.token.scope" : "true", + "display.on.consent.screen" : "true", + "consent.screen.text" : "${addressScopeConsentText}" + }, + "protocolMappers" : [ { + "id" : "59581aea-70d6-4ee8-bec2-1fea5fc497ae", + "name" : "address", + "protocol" : "openid-connect", + "protocolMapper" : "oidc-address-mapper", + "consentRequired" : false, + "config" : { + "user.attribute.formatted" : "formatted", + "user.attribute.country" : "country", + "user.attribute.postal_code" : "postal_code", + "userinfo.token.claim" : "true", + "user.attribute.street" : "street", + "id.token.claim" : "true", + "user.attribute.region" : "region", + "access.token.claim" : "true", + "user.attribute.locality" : "locality" + } + } ] + }, { + "id" : "f515ec81-3c1b-4d4d-b7a2-e7e8d47b6447", + "name" : "roles", + "description" : "OpenID Connect scope for add user roles to the access token", + "protocol" : "openid-connect", + "attributes" : { + "include.in.token.scope" : "false", + "display.on.consent.screen" : "true", + "consent.screen.text" : "${rolesScopeConsentText}" + }, + "protocolMappers" : [ { + "id" : "26d299a8-69e2-4864-9595-17a5b417fc61", + "name" : "realm roles", + "protocol" : "openid-connect", + "protocolMapper" : "oidc-usermodel-realm-role-mapper", + 
"consentRequired" : false, + "config" : { + "user.attribute" : "foo", + "access.token.claim" : "true", + "claim.name" : "realm_access.roles", + "jsonType.label" : "String", + "multivalued" : "true" + } + }, { + "id" : "d2998083-a8db-4f4e-9aaa-9cad68d65b97", + "name" : "audience resolve", + "protocol" : "openid-connect", + "protocolMapper" : "oidc-audience-resolve-mapper", + "consentRequired" : false, + "config" : { } + }, { + "id" : "7a4cb2e5-07a0-4c16-a024-71df7ddd6868", + "name" : "client roles", + "protocol" : "openid-connect", + "protocolMapper" : "oidc-usermodel-client-role-mapper", + "consentRequired" : false, + "config" : { + "user.attribute" : "foo", + "access.token.claim" : "true", + "claim.name" : "resource_access.${client_id}.roles", + "jsonType.label" : "String", + "multivalued" : "true" + } + } ] + }, { + "id" : "8f1eafef-92d6-434e-b9ec-6edec1fddd0a", + "name" : "offline_access", + "description" : "OpenID Connect built-in scope: offline_access", + "protocol" : "openid-connect", + "attributes" : { + "consent.screen.text" : "${offlineAccessScopeConsentText}", + "display.on.consent.screen" : "true" + } + }, { + "id" : "c03095aa-b656-447a-9767-0763c2ccb070", + "name" : "acr", + "description" : "OpenID Connect scope for add acr (authentication context class reference) to the token", + "protocol" : "openid-connect", + "attributes" : { + "include.in.token.scope" : "false", + "display.on.consent.screen" : "false" + }, + "protocolMappers" : [ { + "id" : "948b230c-56d0-4000-937c-841cd395d3f9", + "name" : "acr loa level", + "protocol" : "openid-connect", + "protocolMapper" : "oidc-acr-mapper", + "consentRequired" : false, + "config" : { + "id.token.claim" : "true", + "access.token.claim" : "true" + } + } ] + }, { + "id" : "cdf35f63-8ec7-41a0-ae12-f05d415818cc", + "name" : "phone", + "description" : "OpenID Connect built-in scope: phone", + "protocol" : "openid-connect", + "attributes" : { + "include.in.token.scope" : "true", + "display.on.consent.screen" : 
"true", + "consent.screen.text" : "${phoneScopeConsentText}" + }, + "protocolMappers" : [ { + "id" : "ba4348ff-90b1-4e09-89a8-e5c08b04d3d1", + "name" : "phone number", + "protocol" : "openid-connect", + "protocolMapper" : "oidc-usermodel-attribute-mapper", + "consentRequired" : false, + "config" : { + "userinfo.token.claim" : "true", + "user.attribute" : "phoneNumber", + "id.token.claim" : "true", + "access.token.claim" : "true", + "claim.name" : "phone_number", + "jsonType.label" : "String" + } + }, { + "id" : "e6cceae5-8392-4348-b302-f610ece6056e", + "name" : "phone number verified", + "protocol" : "openid-connect", + "protocolMapper" : "oidc-usermodel-attribute-mapper", + "consentRequired" : false, + "config" : { + "userinfo.token.claim" : "true", + "user.attribute" : "phoneNumberVerified", + "id.token.claim" : "true", + "access.token.claim" : "true", + "claim.name" : "phone_number_verified", + "jsonType.label" : "boolean" + } + } ] + }, { + "id" : "4318001c-2970-41d3-91b9-e31c08569872", + "name" : "email", + "description" : "OpenID Connect built-in scope: email", + "protocol" : "openid-connect", + "attributes" : { + "include.in.token.scope" : "true", + "display.on.consent.screen" : "true", + "consent.screen.text" : "${emailScopeConsentText}" + }, + "protocolMappers" : [ { + "id" : "406d02a6-866a-4962-8838-e8c58ada1505", + "name" : "email", + "protocol" : "openid-connect", + "protocolMapper" : "oidc-usermodel-property-mapper", + "consentRequired" : false, + "config" : { + "userinfo.token.claim" : "true", + "user.attribute" : "email", + "id.token.claim" : "true", + "access.token.claim" : "true", + "claim.name" : "email", + "jsonType.label" : "String" + } + }, { + "id" : "33baabc1-9bf2-42e4-8b8e-a53c13f0b744", + "name" : "email verified", + "protocol" : "openid-connect", + "protocolMapper" : "oidc-usermodel-property-mapper", + "consentRequired" : false, + "config" : { + "userinfo.token.claim" : "true", + "user.attribute" : "emailVerified", + "id.token.claim" : 
"true", + "access.token.claim" : "true", + "claim.name" : "email_verified", + "jsonType.label" : "boolean" + } + } ] + }, { + "id" : "5277a84f-d727-4c64-8432-d513127beee1", + "name" : "profile", + "description" : "OpenID Connect built-in scope: profile", + "protocol" : "openid-connect", + "attributes" : { + "include.in.token.scope" : "true", + "display.on.consent.screen" : "true", + "consent.screen.text" : "${profileScopeConsentText}" + }, + "protocolMappers" : [ { + "id" : "0a609875-2678-4056-93ef-dd5c03e6059d", + "name" : "given name", + "protocol" : "openid-connect", + "protocolMapper" : "oidc-usermodel-property-mapper", + "consentRequired" : false, + "config" : { + "userinfo.token.claim" : "true", + "user.attribute" : "firstName", + "id.token.claim" : "true", + "access.token.claim" : "true", + "claim.name" : "given_name", + "jsonType.label" : "String" + } + }, { + "id" : "7c510d18-07ee-4b78-8acd-24b777d11b3c", + "name" : "website", + "protocol" : "openid-connect", + "protocolMapper" : "oidc-usermodel-attribute-mapper", + "consentRequired" : false, + "config" : { + "userinfo.token.claim" : "true", + "user.attribute" : "website", + "id.token.claim" : "true", + "access.token.claim" : "true", + "claim.name" : "website", + "jsonType.label" : "String" + } + }, { + "id" : "0bb6d0ea-195f-49e8-918c-c419a26a661c", + "name" : "username", + "protocol" : "openid-connect", + "protocolMapper" : "oidc-usermodel-property-mapper", + "consentRequired" : false, + "config" : { + "userinfo.token.claim" : "true", + "user.attribute" : "username", + "id.token.claim" : "true", + "access.token.claim" : "true", + "claim.name" : "preferred_username", + "jsonType.label" : "String" + } + }, { + "id" : "5f1e644c-1acf-440c-b1a6-b5f65bcebfd9", + "name" : "profile", + "protocol" : "openid-connect", + "protocolMapper" : "oidc-usermodel-attribute-mapper", + "consentRequired" : false, + "config" : { + "userinfo.token.claim" : "true", + "user.attribute" : "profile", + "id.token.claim" : "true", + 
"access.token.claim" : "true", + "claim.name" : "profile", + "jsonType.label" : "String" + } + }, { + "id" : "c710bdb2-6cfd-4f60-9c4e-730188fc62f7", + "name" : "family name", + "protocol" : "openid-connect", + "protocolMapper" : "oidc-usermodel-property-mapper", + "consentRequired" : false, + "config" : { + "userinfo.token.claim" : "true", + "user.attribute" : "lastName", + "id.token.claim" : "true", + "access.token.claim" : "true", + "claim.name" : "family_name", + "jsonType.label" : "String" + } + }, { + "id" : "012d5038-0e13-42ba-9df7-2487c8e2eead", + "name" : "nickname", + "protocol" : "openid-connect", + "protocolMapper" : "oidc-usermodel-attribute-mapper", + "consentRequired" : false, + "config" : { + "userinfo.token.claim" : "true", + "user.attribute" : "nickname", + "id.token.claim" : "true", + "access.token.claim" : "true", + "claim.name" : "nickname", + "jsonType.label" : "String" + } + }, { + "id" : "21590b19-517d-4b6d-92f6-d4f71238677e", + "name" : "updated at", + "protocol" : "openid-connect", + "protocolMapper" : "oidc-usermodel-attribute-mapper", + "consentRequired" : false, + "config" : { + "userinfo.token.claim" : "true", + "user.attribute" : "updatedAt", + "id.token.claim" : "true", + "access.token.claim" : "true", + "claim.name" : "updated_at", + "jsonType.label" : "long" + } + }, { + "id" : "e4cddca7-1360-42f3-9854-da6cbe00c71e", + "name" : "birthdate", + "protocol" : "openid-connect", + "protocolMapper" : "oidc-usermodel-attribute-mapper", + "consentRequired" : false, + "config" : { + "userinfo.token.claim" : "true", + "user.attribute" : "birthdate", + "id.token.claim" : "true", + "access.token.claim" : "true", + "claim.name" : "birthdate", + "jsonType.label" : "String" + } + }, { + "id" : "afee328f-c64c-43e6-80d0-be2721c2ed0e", + "name" : "locale", + "protocol" : "openid-connect", + "protocolMapper" : "oidc-usermodel-attribute-mapper", + "consentRequired" : false, + "config" : { + "userinfo.token.claim" : "true", + "user.attribute" : "locale", 
+ "id.token.claim" : "true", + "access.token.claim" : "true", + "claim.name" : "locale", + "jsonType.label" : "String" + } + }, { + "id" : "780a1e2c-5b63-46f4-a5bf-dc3fd8ce0cbb", + "name" : "full name", + "protocol" : "openid-connect", + "protocolMapper" : "oidc-full-name-mapper", + "consentRequired" : false, + "config" : { + "id.token.claim" : "true", + "access.token.claim" : "true", + "userinfo.token.claim" : "true" + } + }, { + "id" : "aeebffff-f776-427e-83ed-064707ffce57", + "name" : "zoneinfo", + "protocol" : "openid-connect", + "protocolMapper" : "oidc-usermodel-attribute-mapper", + "consentRequired" : false, + "config" : { + "userinfo.token.claim" : "true", + "user.attribute" : "zoneinfo", + "id.token.claim" : "true", + "access.token.claim" : "true", + "claim.name" : "zoneinfo", + "jsonType.label" : "String" + } + }, { + "id" : "b3e840a2-1794-4da1-bf69-31905cbff0d6", + "name" : "middle name", + "protocol" : "openid-connect", + "protocolMapper" : "oidc-usermodel-attribute-mapper", + "consentRequired" : false, + "config" : { + "userinfo.token.claim" : "true", + "user.attribute" : "middleName", + "id.token.claim" : "true", + "access.token.claim" : "true", + "claim.name" : "middle_name", + "jsonType.label" : "String" + } + }, { + "id" : "0607e0e4-4f7f-4214-996d-3599772ce1c7", + "name" : "picture", + "protocol" : "openid-connect", + "protocolMapper" : "oidc-usermodel-attribute-mapper", + "consentRequired" : false, + "config" : { + "userinfo.token.claim" : "true", + "user.attribute" : "picture", + "id.token.claim" : "true", + "access.token.claim" : "true", + "claim.name" : "picture", + "jsonType.label" : "String" + } + }, { + "id" : "426a609b-4e28-4132-af0d-13297b8cb63a", + "name" : "gender", + "protocol" : "openid-connect", + "protocolMapper" : "oidc-usermodel-attribute-mapper", + "consentRequired" : false, + "config" : { + "userinfo.token.claim" : "true", + "user.attribute" : "gender", + "id.token.claim" : "true", + "access.token.claim" : "true", + "claim.name" 
: "gender", + "jsonType.label" : "String" + } + } ] + }, { + "id" : "a1ebde82-ce21-438f-a3ad-261d3eeb1c01", + "name" : "role_list", + "description" : "SAML role list", + "protocol" : "saml", + "attributes" : { + "consent.screen.text" : "${samlRoleListScopeConsentText}", + "display.on.consent.screen" : "true" + }, + "protocolMappers" : [ { + "id" : "64653ac7-7ffc-4f7c-a589-03e3b68bbd25", + "name" : "role list", + "protocol" : "saml", + "protocolMapper" : "saml-role-list-mapper", + "consentRequired" : false, + "config" : { + "single" : "false", + "attribute.nameformat" : "Basic", + "attribute.name" : "Role" + } + } ] + }, { + "id" : "aeb5b852-dfec-4e67-9d9e-104abe9b3bf2", + "name" : "web-origins", + "description" : "OpenID Connect scope for add allowed web origins to the access token", + "protocol" : "openid-connect", + "attributes" : { + "include.in.token.scope" : "false", + "display.on.consent.screen" : "false", + "consent.screen.text" : "" + }, + "protocolMappers" : [ { + "id" : "e2fa8437-a0f1-46fc-af9c-c40fc09cd6a1", + "name" : "allowed web origins", + "protocol" : "openid-connect", + "protocolMapper" : "oidc-allowed-origins-mapper", + "consentRequired" : false, + "config" : { } + } ] + }, { + "id" : "4fecd0d7-d4ad-457e-90f2-c7202bf01ff5", + "name" : "microprofile-jwt", + "description" : "Microprofile - JWT built-in scope", + "protocol" : "openid-connect", + "attributes" : { + "include.in.token.scope" : "true", + "display.on.consent.screen" : "false" + }, + "protocolMappers" : [ { + "id" : "a9536634-a9f6-4ed5-a8e7-8379d3b002ca", + "name" : "upn", + "protocol" : "openid-connect", + "protocolMapper" : "oidc-usermodel-property-mapper", + "consentRequired" : false, + "config" : { + "userinfo.token.claim" : "true", + "user.attribute" : "username", + "id.token.claim" : "true", + "access.token.claim" : "true", + "claim.name" : "upn", + "jsonType.label" : "String" + } + }, { + "id" : "2ce1a702-9458-4926-9b8a-f82c07215755", + "name" : "groups", + "protocol" : 
"openid-connect", + "protocolMapper" : "oidc-usermodel-realm-role-mapper", + "consentRequired" : false, + "config" : { + "multivalued" : "true", + "user.attribute" : "foo", + "id.token.claim" : "true", + "access.token.claim" : "true", + "claim.name" : "groups", + "jsonType.label" : "String" + } + } ] + } ], + "defaultDefaultClientScopes" : [ "role_list", "profile", "email", "roles", "web-origins", "acr" ], + "defaultOptionalClientScopes" : [ "offline_access", "address", "phone", "microprofile-jwt" ], + "browserSecurityHeaders" : { + "contentSecurityPolicyReportOnly" : "", + "xContentTypeOptions" : "nosniff", + "xRobotsTag" : "none", + "xFrameOptions" : "SAMEORIGIN", + "contentSecurityPolicy" : "frame-src 'self'; frame-ancestors 'self'; object-src 'none';", + "xXSSProtection" : "1; mode=block", + "strictTransportSecurity" : "max-age=31536000; includeSubDomains" + }, + "smtpServer" : { }, + "eventsEnabled" : false, + "eventsListeners" : [ "jboss-logging" ], + "enabledEventTypes" : [ ], + "adminEventsEnabled" : false, + "adminEventsDetailsEnabled" : false, + "identityProviders" : [ ], + "identityProviderMappers" : [ ], + "components" : { + "org.keycloak.services.clientregistration.policy.ClientRegistrationPolicy" : [ { + "id" : "8115796f-8f1f-4d6a-88f8-ca2938451260", + "name" : "Allowed Client Scopes", + "providerId" : "allowed-client-templates", + "subType" : "authenticated", + "subComponents" : { }, + "config" : { + "allow-default-scopes" : [ "true" ] + } + }, { + "id" : "044bd055-714d-478e-aa93-303d2161c427", + "name" : "Allowed Protocol Mapper Types", + "providerId" : "allowed-protocol-mappers", + "subType" : "authenticated", + "subComponents" : { }, + "config" : { + "allowed-protocol-mapper-types" : [ "saml-user-property-mapper", "oidc-address-mapper", "oidc-sha256-pairwise-sub-mapper", "saml-role-list-mapper", "saml-user-attribute-mapper", "oidc-usermodel-property-mapper", "oidc-usermodel-attribute-mapper", "oidc-full-name-mapper" ] + } + }, { + "id" : 
"be465734-3b0f-4370-a144-73db756e23f8", + "name" : "Allowed Protocol Mapper Types", + "providerId" : "allowed-protocol-mappers", + "subType" : "anonymous", + "subComponents" : { }, + "config" : { + "allowed-protocol-mapper-types" : [ "oidc-usermodel-attribute-mapper", "saml-user-property-mapper", "oidc-address-mapper", "oidc-sha256-pairwise-sub-mapper", "saml-user-attribute-mapper", "oidc-full-name-mapper", "oidc-usermodel-property-mapper", "saml-role-list-mapper" ] + } + }, { + "id" : "42a2f64d-ac9e-4221-9cf6-40ff8c868629", + "name" : "Trusted Hosts", + "providerId" : "trusted-hosts", + "subType" : "anonymous", + "subComponents" : { }, + "config" : { + "host-sending-registration-request-must-match" : [ "true" ], + "client-uris-must-match" : [ "true" ] + } + }, { + "id" : "7ca08915-6c33-454c-88f2-20e1d6553b26", + "name" : "Max Clients Limit", + "providerId" : "max-clients", + "subType" : "anonymous", + "subComponents" : { }, + "config" : { + "max-clients" : [ "200" ] + } + }, { + "id" : "f01f2b6f-3f01-4d01-b2f4-70577c6f599c", + "name" : "Allowed Client Scopes", + "providerId" : "allowed-client-templates", + "subType" : "anonymous", + "subComponents" : { }, + "config" : { + "allow-default-scopes" : [ "true" ] + } + }, { + "id" : "516d7f21-f21a-4690-831e-36ad313093b2", + "name" : "Consent Required", + "providerId" : "consent-required", + "subType" : "anonymous", + "subComponents" : { }, + "config" : { } + }, { + "id" : "c79df6a0-d4d8-4866-b9e6-8ddb5d1bd38e", + "name" : "Full Scope Disabled", + "providerId" : "scope", + "subType" : "anonymous", + "subComponents" : { }, + "config" : { } + } ], + "org.keycloak.userprofile.UserProfileProvider" : [ { + "id" : "cf47a21f-c8fb-42f2-9bff-feca967db183", + "providerId" : "declarative-user-profile", + "subComponents" : { }, + "config" : { } + } ], + "org.keycloak.keys.KeyProvider" : [ { + "id" : "6b4a2281-a9e8-43ab-aee7-190ae91b2842", + "name" : "aes-generated", + "providerId" : "aes-generated", + "subComponents" : { }, + 
"config" : { + "kid" : [ "47b9c2c2-32dc-4317-bd8b-1c4e5bb740ca" ], + "secret" : [ "9VWsVSqbj5zWa8Mq-rRzOw" ], + "priority" : [ "100" ] + } + }, { + "id" : "68e2d2b0-4976-480f-ab76-f84a17686b05", + "name" : "rsa-enc-generated", + "providerId" : "rsa-enc-generated", + "subComponents" : { }, + "config" : { + "privateKey" : [ "MIIEpQIBAAKCAQEAwuIcVVJDncorsQcFef4M/J9dsaNNmwEv/+4pCSZuco7IlA9uCfvwjYgfwQlWoCHCc7JFEtUOXhpLNR0SJ9w2eCC9A/0horjLmiVGU5sGACGrAxSgipt399k83mtkPBTikT1BXumPrX51ovdEPVPQSO0hIBwFn4ZDwA9P/00jNzzswyLC2UDdQrwIjm2xWjq1X82d8mL3+Yp8lF9qD1w305+XPiqCC+TUunKsuCQq5sddet+UoCDsFQyxsJi6cWJrryDvQmiDgM2wm68jn6hyzDE76J1az0wKEGqoMEwIy0juqZCyAqgsm3xA+zHpTcI3EyTwDGpMvWNJp8AWqXPNaQIDAQABAoIBAAethL1+n/6WpUBEaoHcVrq5/2+vo0+dfTyVZNKRFqtG0WOWPzOflFd1HZV7YVPuJI+uPi8ANmsnbh9YcaYg9JiTZ0hMZ++giBf0ID2hZxv995NyXnf7fkoFKghevYG+9mVPtHRmxKlKiPFWfHQjP1ACNKAD2UZdcdbzxicaIkPV/hP996mZA3xaaudggAJq7u/W67H2Q6ofGqW4TI5241d8T+6yobbvXRe4n8FKz4eK2aZv+N+zwh5JDMsJ8050+lCDsyoyakEPf+4veuPkewx4FemAiotDNcmoUQSDL26wLw8kk1uZ9JY0M88OL5pMyBuxTqy0F6BWBltq80mlefECgYEA4vZ8Agu2plXOzWASn0dyhCel3QoeUqNY8D8A+0vK9qWxUE9jMG13jAZmsL2I38SuwRN1DhJezbrn4QTuxTukxgSjLDv/pBp9UnXnCz/fg4yPTYsZ0zHqTMbwvdtfIzBHTCYyIJ+unxVYoenC0XZKSQXA3NN2zNqYpLhjStWdEZECgYEA29DznJxpDZsRUieRxFgZ+eRCjbQ9Q2A46preqMo1KOZ6bt9avxG3uM7pUC+UOeIizeRzxPSJ2SyptYPzdaNwKN3Lq+RhjHe1zYLngXb0CIQaRwNHqePxXF1sg0dTbmcxf+Co7yPG+Nd5nrQq9SQHC3tLTyL6x3VU/yAfMQqUklkCgYEAyVl8iGAV6RkE/4R04OOEv6Ng7WkVn6CUvYZXe5kw9YHnfWUAjS0AOrRPFAsBy+r0UgvN8+7uNjvTjPhQT5/rPVVN4WdVEyQA/E/m6j7/LvhbBaMbBRcqUnTHjNd6XoBtMCxOmkyvoShR2krE8AiuPHwjLoVXxsNDWhbO18wMrVECgYEAlmkICOXNzI2K8Jg62gse2yshjy0BrpSs3XtTWFPkxDPRGwSiZ5OMD10lsMSdvG3MOu5TeTWLDZvOFHJRqPFI0e3Sa7A+P4u6TwF/v8rRePJLuMO5ybo7cWRL2Bh6MlVSPZpQfjIQ+D0Y70uBCXS5jVW0VlYtG0Zh/qDQNxJyTyECgYEAuRINlZ0ag+1QTITapSatbFWd/KquGLpMjZyF4k5gVHs+4zHnnTi1YIDUInp1FJBqKD27z2byy7KFgbMBZQmsDs8i4fgzQrJHe3D4WFFHCjiClbeReejbas9bOnqhSQCiIy1Ck8vMAriAtctSA/g/qq6dQApSgcWaKvTVL2Ywa7E=" ], + "keyUse" : [ "ENC" ], + "certificate" : [ 
"MIIClzCCAX8CBgGIQhOIijANBgkqhkiG9w0BAQsFADAPMQ0wCwYDVQQDDAR0ZXN0MB4XDTIzMDUyMjA2MDczNloXDTMzMDUyMjA2MDkxNlowDzENMAsGA1UEAwwEdGVzdDCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAMLiHFVSQ53KK7EHBXn+DPyfXbGjTZsBL//uKQkmbnKOyJQPbgn78I2IH8EJVqAhwnOyRRLVDl4aSzUdEifcNnggvQP9IaK4y5olRlObBgAhqwMUoIqbd/fZPN5rZDwU4pE9QV7pj61+daL3RD1T0EjtISAcBZ+GQ8APT/9NIzc87MMiwtlA3UK8CI5tsVo6tV/NnfJi9/mKfJRfag9cN9Oflz4qggvk1LpyrLgkKubHXXrflKAg7BUMsbCYunFia68g70Jog4DNsJuvI5+ocswxO+idWs9MChBqqDBMCMtI7qmQsgKoLJt8QPsx6U3CNxMk8AxqTL1jSafAFqlzzWkCAwEAATANBgkqhkiG9w0BAQsFAAOCAQEAIEIfjqOr2m+8s2RR8VW/nBgOgu9HtPRda4qNhGbgBkZ8NDy7TwHqlHo1ujKW5RO438pRyLJmOibWN4a/rkUsSjin6vgy4l8KpQy+7a4cQCQHyl34TmPjbtiw1jKgiOjzRQY54NVwIJNMIMc1ZyQo4u0U30/FxgUv6akXfS5O1ePD+5xKOOC/Af9AletjhQMPwVxXDwFqfQf/p+SM4Pyn4L633MESfDrH8v9FjJd0lV5ZlEI4hpPtnbi9U+CInqCy3VDNlZjsXswaDRujjg3LERfOMvCgj+Dck3FzWG7EiCwXWNEPvdMzv4w7M6KXuiPPQkST8DUWjgkjUCeLBzT3yw==" ], + "priority" : [ "100" ], + "algorithm" : [ "RSA-OAEP" ] + } + }, { + "id" : "728769a3-99a4-4cca-959d-28181dfee7e8", + "name" : "rsa-generated", + "providerId" : "rsa-generated", + "subComponents" : { }, + "config" : { + "privateKey" : [ 
"MIIEowIBAAKCAQEAxIszQCv8bX3sKXJVtuLJV6cH/uhkzxcTEIcDe7y2Y2SFM0x2nF6wRLk8QkvIrRmelilegUIJttqZxLXMpxwUJGizehHQMrOCzNoGBZdVanoK7nNa5+FOYtlvL4GxNfwzS36sp3PnKQiGv5Q7RGuPthjLFfqTmYx/7GTDJC4vLEW5S01Vy/Xc9FE4FsT0hnm91lRWjppc9893M5QUy/TPu8udIuNV87Ko5yiIxQqcPiAQXJaN4CyGaDcYhhzzHdxVptIk2FvtxhpmNxrbtmBCx/o9/rBDQNTis8Ex6ItWC2PvC17UPvyOcZ4Fv/qO0L6JZ0mrpH95CeDU1kEP+KKZrwIDAQABAoIBAGGl6SYiVG1PyTQEXqqY/UCjt3jBnEg5ZhrpgWUKKrGyAO2uOSXSc5AJWfN0NHUwC9b+IbplhW8IJ6qQSmfiLu2x6S2mSQLPphZB4gkIGYNntCOpQ0p+aZP6BGAddt5j+VYyTvR5RKlh15S6QEHrkMB/i/LVBl0c7XeUzlEc8wnyj8DGvlmpcQzIcbWfqEZ/FciDdKGNN0M4V/r1uQiOUVZ69SWDBBwu41YwF7PYUsX83q8zn0nBeMqz0ggSf33lW4w31fox9c7EjIF01gPArE5uT+d+AwjVKHpd08LWGR9W9NSXVOPUKkzOM+PyvKGvzjMnlrm/feqowKQbL2q/GP0CgYEA/EsrvUojkFIWxHc19KJdJvqlYgLeWq6P/J7UmHgpl+S3nG6b9HH4/aM/ICDa5hxd5bmP5p2V3EuZWnyb6/QB5eipC7Ss3oM7XeS/PwvTp6NTC1fypx2zHKse3iuLeCGneRxiw15mB02ArJ/qJw/VSQK2J7RiR4+b6HYpdzQnIysCgYEAx25dTQqskQqsx/orJzuUqfNv/C0W4vqfz1eL3akFrdK+YqghXKFsDmh61JpTrTKnRLAdQeyOrhKwbNsdxSEEaeeLayKLVlimoFXGd/LZb5LQiwFcrvTzhnB+FLmFgqTnuLkpfY1woHEwSW9TpJewjbT9S6g0L2uh223nVXuLMY0CgYEA3pMOlmMGtvbEoTSuRBDNb2rmZm4zbfrcijgxRAWWZCtiFL68FU5LJLBVK2nw09sot1cabZCOuhdzxhFymRneZs73+5y8eV17DV2VnvA3HIiI5dQD/YzFDECm7ceqtiOylLUHKGZqSn0ETMaTkzxzpIKg4qxPm+RE3jMIZ+J5uJsCgYBk2iUIrtsxxgo2Xwavomu9vkPlbQ/j3QYwHn+2qqEalDZ/QbMNWvyAFMn49cpXDgSUsdM54V0OHpllkzFs3ROUUumoViHMmqw47OefBQp8Z+xaP2gVef4lAIJiDKe9t5MPUWPwADTyjgrzN/8+fw9juiFVv0wUpwOFKgEQs5diiQKBgC6RpZESc5Nl4nHrDvIl5n/zYED6BaXoLl15NhcoBudt5SIRO/RpvBW69A7aE/UK6p7WXjq4mP1ssIWz4KgATCoXUgYvn0a7Ql79r/CMce6/FvcuweED6u6bD0kdXuYhe8fR9IPmLfnnb4Cx3JOJeRZbiBSP5HOZJ7nsKibxcgPm" ], + "keyUse" : [ "SIG" ], + "certificate" : [ 
"MIIClzCCAX8CBgGIQhOHjjANBgkqhkiG9w0BAQsFADAPMQ0wCwYDVQQDDAR0ZXN0MB4XDTIzMDUyMjA2MDczNloXDTMzMDUyMjA2MDkxNlowDzENMAsGA1UEAwwEdGVzdDCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAMSLM0Ar/G197ClyVbbiyVenB/7oZM8XExCHA3u8tmNkhTNMdpxesES5PEJLyK0ZnpYpXoFCCbbamcS1zKccFCRos3oR0DKzgszaBgWXVWp6Cu5zWufhTmLZby+BsTX8M0t+rKdz5ykIhr+UO0Rrj7YYyxX6k5mMf+xkwyQuLyxFuUtNVcv13PRROBbE9IZ5vdZUVo6aXPfPdzOUFMv0z7vLnSLjVfOyqOcoiMUKnD4gEFyWjeAshmg3GIYc8x3cVabSJNhb7cYaZjca27ZgQsf6Pf6wQ0DU4rPBMeiLVgtj7wte1D78jnGeBb/6jtC+iWdJq6R/eQng1NZBD/iima8CAwEAATANBgkqhkiG9w0BAQsFAAOCAQEAe0Bo1UpGfpOlJiVhp0XWExm8bdxFgXOU2M5XeZBsWAqBehvJkzn+tbAtlVNiIiN58XFFpH+xLZ2nJIZR5FHeCD3bYAgK72j5k45HJI95vPyslelfT/m3Np78+1iUa1U1WxN40JaowP1EeTkk5O8Pk4zTQ1Ne1usmKd+SJxI1KWN0kKuVFMmdNRb5kQKWeQvOSlWl7rd4bvHGvVnxgcPC1bshEJKRt+VpaUjpm6CKd8C3Kt7IWfIX4HTVhKZkmLn7qv6aSfwWelwZfLdaXcLXixqzqNuUk/VWbF9JT4iiag9F3mt7xryIkoRp1AEjCA82HqK72F4JCFyOhCiGrMfKJw==" ], + "priority" : [ "100" ] + } + }, { + "id" : "f30af2d2-d042-43b8-bc6d-22f6bab6934c", + "name" : "hmac-generated", + "providerId" : "hmac-generated", + "subComponents" : { }, + "config" : { + "kid" : [ "6f0d9688-e974-42b4-9d84-8d098c51007c" ], + "secret" : [ "8nruwD66Revr9k21e-BHtcyvNzAMFOsstxSAB0Gdy2qe2qGRm2kYOwsPzrH9ZQSdj2041SraKo6a3SHvCyTBAQ" ], + "priority" : [ "100" ], + "algorithm" : [ "HS256" ] + } + } ] + }, + "internationalizationEnabled" : false, + "supportedLocales" : [ ], + "authenticationFlows" : [ { + "id" : "94c65ba1-ba50-4be2-94c4-de656145eb67", + "alias" : "Account verification options", + "description" : "Method with which to verity the existing account", + "providerId" : "basic-flow", + "topLevel" : false, + "builtIn" : true, + "authenticationExecutions" : [ { + "authenticator" : "idp-email-verification", + "authenticatorFlow" : false, + "requirement" : "ALTERNATIVE", + "priority" : 10, + "autheticatorFlow" : false, + "userSetupAllowed" : false + }, { + "authenticatorFlow" : true, + "requirement" : "ALTERNATIVE", + "priority" : 20, + "autheticatorFlow" : true, + 
"flowAlias" : "Verify Existing Account by Re-authentication", + "userSetupAllowed" : false + } ] + }, { + "id" : "3b706ddf-c4b6-498a-803c-772878bc9bc3", + "alias" : "Authentication Options", + "description" : "Authentication options.", + "providerId" : "basic-flow", + "topLevel" : false, + "builtIn" : true, + "authenticationExecutions" : [ { + "authenticator" : "basic-auth", + "authenticatorFlow" : false, + "requirement" : "REQUIRED", + "priority" : 10, + "autheticatorFlow" : false, + "userSetupAllowed" : false + }, { + "authenticator" : "basic-auth-otp", + "authenticatorFlow" : false, + "requirement" : "DISABLED", + "priority" : 20, + "autheticatorFlow" : false, + "userSetupAllowed" : false + }, { + "authenticator" : "auth-spnego", + "authenticatorFlow" : false, + "requirement" : "DISABLED", + "priority" : 30, + "autheticatorFlow" : false, + "userSetupAllowed" : false + } ] + }, { + "id" : "9ea0b8f6-882c-45ad-9110-78adf5a5d233", + "alias" : "Browser - Conditional OTP", + "description" : "Flow to determine if the OTP is required for the authentication", + "providerId" : "basic-flow", + "topLevel" : false, + "builtIn" : true, + "authenticationExecutions" : [ { + "authenticator" : "conditional-user-configured", + "authenticatorFlow" : false, + "requirement" : "REQUIRED", + "priority" : 10, + "autheticatorFlow" : false, + "userSetupAllowed" : false + }, { + "authenticator" : "auth-otp-form", + "authenticatorFlow" : false, + "requirement" : "REQUIRED", + "priority" : 20, + "autheticatorFlow" : false, + "userSetupAllowed" : false + } ] + }, { + "id" : "99c5ba83-b585-4601-b740-1a26670bf4e9", + "alias" : "Direct Grant - Conditional OTP", + "description" : "Flow to determine if the OTP is required for the authentication", + "providerId" : "basic-flow", + "topLevel" : false, + "builtIn" : true, + "authenticationExecutions" : [ { + "authenticator" : "conditional-user-configured", + "authenticatorFlow" : false, + "requirement" : "REQUIRED", + "priority" : 10, + 
"autheticatorFlow" : false, + "userSetupAllowed" : false + }, { + "authenticator" : "direct-grant-validate-otp", + "authenticatorFlow" : false, + "requirement" : "REQUIRED", + "priority" : 20, + "autheticatorFlow" : false, + "userSetupAllowed" : false + } ] + }, { + "id" : "65b73dec-7dd1-4de8-b542-a023b7104afc", + "alias" : "First broker login - Conditional OTP", + "description" : "Flow to determine if the OTP is required for the authentication", + "providerId" : "basic-flow", + "topLevel" : false, + "builtIn" : true, + "authenticationExecutions" : [ { + "authenticator" : "conditional-user-configured", + "authenticatorFlow" : false, + "requirement" : "REQUIRED", + "priority" : 10, + "autheticatorFlow" : false, + "userSetupAllowed" : false + }, { + "authenticator" : "auth-otp-form", + "authenticatorFlow" : false, + "requirement" : "REQUIRED", + "priority" : 20, + "autheticatorFlow" : false, + "userSetupAllowed" : false + } ] + }, { + "id" : "9a26b76f-da95-43f1-8da3-16c4a0654f07", + "alias" : "Handle Existing Account", + "description" : "Handle what to do if there is existing account with same email/username like authenticated identity provider", + "providerId" : "basic-flow", + "topLevel" : false, + "builtIn" : true, + "authenticationExecutions" : [ { + "authenticator" : "idp-confirm-link", + "authenticatorFlow" : false, + "requirement" : "REQUIRED", + "priority" : 10, + "autheticatorFlow" : false, + "userSetupAllowed" : false + }, { + "authenticatorFlow" : true, + "requirement" : "REQUIRED", + "priority" : 20, + "autheticatorFlow" : true, + "flowAlias" : "Account verification options", + "userSetupAllowed" : false + } ] + }, { + "id" : "0a77285e-d7d5-4b6c-aa9a-3eadb5e7e3d3", + "alias" : "Reset - Conditional OTP", + "description" : "Flow to determine if the OTP should be reset or not. 
Set to REQUIRED to force.", + "providerId" : "basic-flow", + "topLevel" : false, + "builtIn" : true, + "authenticationExecutions" : [ { + "authenticator" : "conditional-user-configured", + "authenticatorFlow" : false, + "requirement" : "REQUIRED", + "priority" : 10, + "autheticatorFlow" : false, + "userSetupAllowed" : false + }, { + "authenticator" : "reset-otp", + "authenticatorFlow" : false, + "requirement" : "REQUIRED", + "priority" : 20, + "autheticatorFlow" : false, + "userSetupAllowed" : false + } ] + }, { + "id" : "cb6c0b3b-2f5f-4493-9d14-6130f8b58dd7", + "alias" : "User creation or linking", + "description" : "Flow for the existing/non-existing user alternatives", + "providerId" : "basic-flow", + "topLevel" : false, + "builtIn" : true, + "authenticationExecutions" : [ { + "authenticatorConfig" : "create unique user config", + "authenticator" : "idp-create-user-if-unique", + "authenticatorFlow" : false, + "requirement" : "ALTERNATIVE", + "priority" : 10, + "autheticatorFlow" : false, + "userSetupAllowed" : false + }, { + "authenticatorFlow" : true, + "requirement" : "ALTERNATIVE", + "priority" : 20, + "autheticatorFlow" : true, + "flowAlias" : "Handle Existing Account", + "userSetupAllowed" : false + } ] + }, { + "id" : "0fd3db1b-e93d-4768-82ca-a1498ddc11d0", + "alias" : "Verify Existing Account by Re-authentication", + "description" : "Reauthentication of existing account", + "providerId" : "basic-flow", + "topLevel" : false, + "builtIn" : true, + "authenticationExecutions" : [ { + "authenticator" : "idp-username-password-form", + "authenticatorFlow" : false, + "requirement" : "REQUIRED", + "priority" : 10, + "autheticatorFlow" : false, + "userSetupAllowed" : false + }, { + "authenticatorFlow" : true, + "requirement" : "CONDITIONAL", + "priority" : 20, + "autheticatorFlow" : true, + "flowAlias" : "First broker login - Conditional OTP", + "userSetupAllowed" : false + } ] + }, { + "id" : "86610e70-f9f5-4c11-8a9e-9de1770565fb", + "alias" : "browser", + 
"description" : "browser based authentication", + "providerId" : "basic-flow", + "topLevel" : true, + "builtIn" : true, + "authenticationExecutions" : [ { + "authenticator" : "auth-cookie", + "authenticatorFlow" : false, + "requirement" : "ALTERNATIVE", + "priority" : 10, + "autheticatorFlow" : false, + "userSetupAllowed" : false + }, { + "authenticator" : "auth-spnego", + "authenticatorFlow" : false, + "requirement" : "DISABLED", + "priority" : 20, + "autheticatorFlow" : false, + "userSetupAllowed" : false + }, { + "authenticator" : "identity-provider-redirector", + "authenticatorFlow" : false, + "requirement" : "ALTERNATIVE", + "priority" : 25, + "autheticatorFlow" : false, + "userSetupAllowed" : false + }, { + "authenticatorFlow" : true, + "requirement" : "ALTERNATIVE", + "priority" : 30, + "autheticatorFlow" : true, + "flowAlias" : "forms", + "userSetupAllowed" : false + } ] + }, { + "id" : "f6aa23dd-8532-4d92-9780-3ea226481e3b", + "alias" : "clients", + "description" : "Base authentication for clients", + "providerId" : "client-flow", + "topLevel" : true, + "builtIn" : true, + "authenticationExecutions" : [ { + "authenticator" : "client-secret", + "authenticatorFlow" : false, + "requirement" : "ALTERNATIVE", + "priority" : 10, + "autheticatorFlow" : false, + "userSetupAllowed" : false + }, { + "authenticator" : "client-jwt", + "authenticatorFlow" : false, + "requirement" : "ALTERNATIVE", + "priority" : 20, + "autheticatorFlow" : false, + "userSetupAllowed" : false + }, { + "authenticator" : "client-secret-jwt", + "authenticatorFlow" : false, + "requirement" : "ALTERNATIVE", + "priority" : 30, + "autheticatorFlow" : false, + "userSetupAllowed" : false + }, { + "authenticator" : "client-x509", + "authenticatorFlow" : false, + "requirement" : "ALTERNATIVE", + "priority" : 40, + "autheticatorFlow" : false, + "userSetupAllowed" : false + } ] + }, { + "id" : "4d2caf65-1703-4ddb-8890-70232e91bcd8", + "alias" : "direct grant", + "description" : "OpenID Connect 
Resource Owner Grant", + "providerId" : "basic-flow", + "topLevel" : true, + "builtIn" : true, + "authenticationExecutions" : [ { + "authenticator" : "direct-grant-validate-username", + "authenticatorFlow" : false, + "requirement" : "REQUIRED", + "priority" : 10, + "autheticatorFlow" : false, + "userSetupAllowed" : false + }, { + "authenticator" : "direct-grant-validate-password", + "authenticatorFlow" : false, + "requirement" : "REQUIRED", + "priority" : 20, + "autheticatorFlow" : false, + "userSetupAllowed" : false + }, { + "authenticatorFlow" : true, + "requirement" : "CONDITIONAL", + "priority" : 30, + "autheticatorFlow" : true, + "flowAlias" : "Direct Grant - Conditional OTP", + "userSetupAllowed" : false + } ] + }, { + "id" : "eaa20c41-5334-4fb4-8c45-fb9cc71f7f74", + "alias" : "docker auth", + "description" : "Used by Docker clients to authenticate against the IDP", + "providerId" : "basic-flow", + "topLevel" : true, + "builtIn" : true, + "authenticationExecutions" : [ { + "authenticator" : "docker-http-basic-authenticator", + "authenticatorFlow" : false, + "requirement" : "REQUIRED", + "priority" : 10, + "autheticatorFlow" : false, + "userSetupAllowed" : false + } ] + }, { + "id" : "b9febfb1-f0aa-4590-b782-272a4aa11575", + "alias" : "first broker login", + "description" : "Actions taken after first broker login with identity provider account, which is not yet linked to any Keycloak account", + "providerId" : "basic-flow", + "topLevel" : true, + "builtIn" : true, + "authenticationExecutions" : [ { + "authenticatorConfig" : "review profile config", + "authenticator" : "idp-review-profile", + "authenticatorFlow" : false, + "requirement" : "REQUIRED", + "priority" : 10, + "autheticatorFlow" : false, + "userSetupAllowed" : false + }, { + "authenticatorFlow" : true, + "requirement" : "REQUIRED", + "priority" : 20, + "autheticatorFlow" : true, + "flowAlias" : "User creation or linking", + "userSetupAllowed" : false + } ] + }, { + "id" : 
"03bb6ff4-eccb-4f2f-8953-3769f78c3bf3", + "alias" : "forms", + "description" : "Username, password, otp and other auth forms.", + "providerId" : "basic-flow", + "topLevel" : false, + "builtIn" : true, + "authenticationExecutions" : [ { + "authenticator" : "auth-username-password-form", + "authenticatorFlow" : false, + "requirement" : "REQUIRED", + "priority" : 10, + "autheticatorFlow" : false, + "userSetupAllowed" : false + }, { + "authenticatorFlow" : true, + "requirement" : "CONDITIONAL", + "priority" : 20, + "autheticatorFlow" : true, + "flowAlias" : "Browser - Conditional OTP", + "userSetupAllowed" : false + } ] + }, { + "id" : "38385189-246b-4ea0-ac05-d49dfe1709da", + "alias" : "http challenge", + "description" : "An authentication flow based on challenge-response HTTP Authentication Schemes", + "providerId" : "basic-flow", + "topLevel" : true, + "builtIn" : true, + "authenticationExecutions" : [ { + "authenticator" : "no-cookie-redirect", + "authenticatorFlow" : false, + "requirement" : "REQUIRED", + "priority" : 10, + "autheticatorFlow" : false, + "userSetupAllowed" : false + }, { + "authenticatorFlow" : true, + "requirement" : "REQUIRED", + "priority" : 20, + "autheticatorFlow" : true, + "flowAlias" : "Authentication Options", + "userSetupAllowed" : false + } ] + }, { + "id" : "1022f3c2-0469-41c9-861e-918908f103df", + "alias" : "registration", + "description" : "registration flow", + "providerId" : "basic-flow", + "topLevel" : true, + "builtIn" : true, + "authenticationExecutions" : [ { + "authenticator" : "registration-page-form", + "authenticatorFlow" : true, + "requirement" : "REQUIRED", + "priority" : 10, + "autheticatorFlow" : true, + "flowAlias" : "registration form", + "userSetupAllowed" : false + } ] + }, { + "id" : "00d36c3b-e1dc-41f8-bfd0-5f8c80ea07e8", + "alias" : "registration form", + "description" : "registration form", + "providerId" : "form-flow", + "topLevel" : false, + "builtIn" : true, + "authenticationExecutions" : [ { + "authenticator" 
: "registration-user-creation", + "authenticatorFlow" : false, + "requirement" : "REQUIRED", + "priority" : 20, + "autheticatorFlow" : false, + "userSetupAllowed" : false + }, { + "authenticator" : "registration-profile-action", + "authenticatorFlow" : false, + "requirement" : "REQUIRED", + "priority" : 40, + "autheticatorFlow" : false, + "userSetupAllowed" : false + }, { + "authenticator" : "registration-password-action", + "authenticatorFlow" : false, + "requirement" : "REQUIRED", + "priority" : 50, + "autheticatorFlow" : false, + "userSetupAllowed" : false + }, { + "authenticator" : "registration-recaptcha-action", + "authenticatorFlow" : false, + "requirement" : "DISABLED", + "priority" : 60, + "autheticatorFlow" : false, + "userSetupAllowed" : false + } ] + }, { + "id" : "4374c16e-8c65-4168-94c2-df1ab3f3e6ad", + "alias" : "reset credentials", + "description" : "Reset credentials for a user if they forgot their password or something", + "providerId" : "basic-flow", + "topLevel" : true, + "builtIn" : true, + "authenticationExecutions" : [ { + "authenticator" : "reset-credentials-choose-user", + "authenticatorFlow" : false, + "requirement" : "REQUIRED", + "priority" : 10, + "autheticatorFlow" : false, + "userSetupAllowed" : false + }, { + "authenticator" : "reset-credential-email", + "authenticatorFlow" : false, + "requirement" : "REQUIRED", + "priority" : 20, + "autheticatorFlow" : false, + "userSetupAllowed" : false + }, { + "authenticator" : "reset-password", + "authenticatorFlow" : false, + "requirement" : "REQUIRED", + "priority" : 30, + "autheticatorFlow" : false, + "userSetupAllowed" : false + }, { + "authenticatorFlow" : true, + "requirement" : "CONDITIONAL", + "priority" : 40, + "autheticatorFlow" : true, + "flowAlias" : "Reset - Conditional OTP", + "userSetupAllowed" : false + } ] + }, { + "id" : "04d6ed6a-76c9-41fb-9074-bff8a80c2286", + "alias" : "saml ecp", + "description" : "SAML ECP Profile Authentication Flow", + "providerId" : "basic-flow", + 
"topLevel" : true, + "builtIn" : true, + "authenticationExecutions" : [ { + "authenticator" : "http-basic-authenticator", + "authenticatorFlow" : false, + "requirement" : "REQUIRED", + "priority" : 10, + "autheticatorFlow" : false, + "userSetupAllowed" : false + } ] + } ], + "authenticatorConfig" : [ { + "id" : "e7bad67d-1236-430a-a327-9194f9d1e2b0", + "alias" : "create unique user config", + "config" : { + "require.password.update.after.registration" : "false" + } + }, { + "id" : "287b5989-a927-4cf5-8067-74594ce19bc1", + "alias" : "review profile config", + "config" : { + "update.profile.on.first.login" : "missing" + } + } ], + "requiredActions" : [ { + "alias" : "CONFIGURE_TOTP", + "name" : "Configure OTP", + "providerId" : "CONFIGURE_TOTP", + "enabled" : true, + "defaultAction" : false, + "priority" : 10, + "config" : { } + }, { + "alias" : "terms_and_conditions", + "name" : "Terms and Conditions", + "providerId" : "terms_and_conditions", + "enabled" : false, + "defaultAction" : false, + "priority" : 20, + "config" : { } + }, { + "alias" : "UPDATE_PASSWORD", + "name" : "Update Password", + "providerId" : "UPDATE_PASSWORD", + "enabled" : true, + "defaultAction" : false, + "priority" : 30, + "config" : { } + }, { + "alias" : "UPDATE_PROFILE", + "name" : "Update Profile", + "providerId" : "UPDATE_PROFILE", + "enabled" : true, + "defaultAction" : false, + "priority" : 40, + "config" : { } + }, { + "alias" : "VERIFY_EMAIL", + "name" : "Verify Email", + "providerId" : "VERIFY_EMAIL", + "enabled" : true, + "defaultAction" : false, + "priority" : 50, + "config" : { } + }, { + "alias" : "delete_account", + "name" : "Delete Account", + "providerId" : "delete_account", + "enabled" : false, + "defaultAction" : false, + "priority" : 60, + "config" : { } + }, { + "alias" : "webauthn-register", + "name" : "Webauthn Register", + "providerId" : "webauthn-register", + "enabled" : true, + "defaultAction" : false, + "priority" : 70, + "config" : { } + }, { + "alias" : 
"webauthn-register-passwordless", + "name" : "Webauthn Register Passwordless", + "providerId" : "webauthn-register-passwordless", + "enabled" : true, + "defaultAction" : false, + "priority" : 80, + "config" : { } + }, { + "alias" : "update_user_locale", + "name" : "Update User Locale", + "providerId" : "update_user_locale", + "enabled" : true, + "defaultAction" : false, + "priority" : 1000, + "config" : { } + } ], + "browserFlow" : "browser", + "registrationFlow" : "registration", + "directGrantFlow" : "direct grant", + "resetCredentialsFlow" : "reset credentials", + "clientAuthenticationFlow" : "clients", + "dockerAuthenticationFlow" : "docker auth", + "attributes" : { + "cibaBackchannelTokenDeliveryMode" : "poll", + "cibaAuthRequestedUserHint" : "login_hint", + "oauth2DevicePollingInterval" : "5", + "clientOfflineSessionMaxLifespan" : "0", + "clientSessionIdleTimeout" : "0", + "clientOfflineSessionIdleTimeout" : "0", + "cibaInterval" : "5", + "cibaExpiresIn" : "120", + "oauth2DeviceCodeLifespan" : "600", + "parRequestUriLifespan" : "60", + "clientSessionMaxLifespan" : "0", + "frontendUrl" : "" + }, + "keycloakVersion" : "19.0.3", + "userManagedAccessAllowed" : false, + "clientProfiles" : { + "profiles" : [ ] + }, + "clientPolicies" : { + "policies" : [ ] + } +} \ No newline at end of file diff --git a/docker-compose-dev.yml b/docker-compose-dev.yml index 30c55661a20..f00be57ea9c 100644 --- a/docker-compose-dev.yml +++ b/docker-compose-dev.yml @@ -100,8 +100,8 @@ services: - /mail:mode=770,size=128M,uid=1000,gid=1000 dev_keycloak: - container_name: "dev_keycloack" - image: 'quay.io/keycloak/keycloak:19.0' + container_name: "dev_keycloak" + image: 'quay.io/keycloak/keycloak:21.0' hostname: keycloak environment: - KEYCLOAK_ADMIN=kcadmin @@ -116,7 +116,7 @@ services: ports: - "8090:8090" volumes: - - './conf/keycloak/oidc-realm.json:/opt/keycloak/data/import/oidc-realm.json' + - './conf/keycloak/test-realm.json:/opt/keycloak/data/import/test-realm.json' networks: 
dataverse: From 947ed780d8a3300c9202e62853a0b810b1039b49 Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Mon, 22 May 2023 16:10:40 +0200 Subject: [PATCH 087/396] docs(dev,oidc): update new OIDC test realm description --- .../source/developers/remote-users.rst | 27 +++++++++++++++---- 1 file changed, 22 insertions(+), 5 deletions(-) diff --git a/doc/sphinx-guides/source/developers/remote-users.rst b/doc/sphinx-guides/source/developers/remote-users.rst index d8f90e9257f..a07f104d1c8 100755 --- a/doc/sphinx-guides/source/developers/remote-users.rst +++ b/doc/sphinx-guides/source/developers/remote-users.rst @@ -39,7 +39,7 @@ STOP! ``oidc-keycloak-auth-provider.json`` was changed from http://localhost:809 If you are working on the OpenID Connect (OIDC) user authentication flow, you do not need to connect to a remote provider (as explained in :doc:`/installation/oidc`) to test this feature. Instead, you can use the available configuration that allows you to run a test Keycloak OIDC identity management service locally through a Docker container. -(Please note! The client secret (``ss6gE8mODCDfqesQaSG3gwUwZqZt547E``) is hard-coded in ``oidc-realm.json`` and ``oidc-keycloak-auth-provider.json``. Do not use this config in production! This is only for developers.) +(Please note! The client secret (``94XHrfNRwXsjqTqApRrwWmhDLDHpIYV8``) is hard-coded in ``test-realm.json`` and ``oidc-keycloak-auth-provider.json``. Do not use this config in production! This is only for developers.) You can find this configuration in ``conf/keycloak``. There are two options available in this directory to run a Keycloak container: bash script or docker-compose. @@ -55,15 +55,32 @@ Now load the configuration defined in ``oidc-keycloak-auth-provider.json`` into You should see the new provider, called "OIDC-Keycloak", under "Other options" on the Log In page. 
-You should be able to log into Keycloak with the following credentials: - -- username: kcuser -- password: kcpassword +You should be able to log into Keycloak with the one of the following credentials: + +.. list-table:: + + * - Username + - Password + - Group + * - admin + - admin + - admins + * - curator + - curator + - curators + * - user + - user + - members + * - affiliate + - affiliate + - \- In case you want to stop and remove the Keycloak container, just run the other available bash script: ``./rm-keycloak.sh`` +Note: the Keycloak admin to login at the admin console is ``kcadmin:kcpassword`` + ---- Previous: :doc:`unf/index` | Next: :doc:`geospatial` From 23a11718618fcc15daa3d6ddcfb18f0f49378e9d Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Mon, 22 May 2023 16:13:47 +0200 Subject: [PATCH 088/396] test(oidc): update OIDC integration test with new realm This commit enables using Keycloak 20+ with this test by replacing the test realm and using the client to retrieve the access token on a side channel in a Keycloak 20+ compatible way. 
(The old one only worked for v19, this one is compatible with all) --- .../OIDCAuthenticationProviderFactoryIT.java | 71 ++++++++++++------- 1 file changed, 47 insertions(+), 24 deletions(-) diff --git a/src/test/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/oidc/OIDCAuthenticationProviderFactoryIT.java b/src/test/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/oidc/OIDCAuthenticationProviderFactoryIT.java index a5aa29cc083..88f70c53948 100644 --- a/src/test/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/oidc/OIDCAuthenticationProviderFactoryIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/oidc/OIDCAuthenticationProviderFactoryIT.java @@ -8,6 +8,8 @@ import edu.harvard.iq.dataverse.api.auth.doubles.BearerTokenKeyContainerRequestTestFake; import edu.harvard.iq.dataverse.authorization.AuthenticationServiceBean; import edu.harvard.iq.dataverse.authorization.UserRecordIdentifier; +import edu.harvard.iq.dataverse.authorization.providers.oauth2.OAuth2Exception; +import edu.harvard.iq.dataverse.authorization.providers.oauth2.OAuth2UserRecord; import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser; import edu.harvard.iq.dataverse.authorization.users.User; import edu.harvard.iq.dataverse.mocks.MockAuthenticatedUser; @@ -17,6 +19,7 @@ import org.junit.jupiter.api.Tag; import org.junit.jupiter.api.Test; import org.junit.jupiter.api.extension.ExtendWith; +import org.keycloak.OAuth2Constants; import org.keycloak.admin.client.Keycloak; import org.keycloak.admin.client.KeycloakBuilder; import org.mockito.InjectMocks; @@ -32,6 +35,7 @@ import static edu.harvard.iq.dataverse.authorization.providers.oauth2.oidc.OIDCAuthenticationProviderFactoryIT.clientSecret; import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertThrows; import static 
org.junit.jupiter.api.Assertions.assertTrue; import static org.junit.jupiter.api.Assumptions.assumeFalse; import static org.junit.jupiter.api.Assumptions.assumeTrue; @@ -47,18 +51,19 @@ @JvmSetting(key = JvmSettings.OIDC_AUTH_SERVER_URL, method = "getAuthUrl") class OIDCAuthenticationProviderFactoryIT { - // NOTE: the following values are taken from the realm import file! - static final String clientId = "oidc-client"; - static final String clientSecret = "ss6gE8mODCDfqesQaSG3gwUwZqZt547E"; - static final String realm = "oidc-realm"; - static final String adminUser = "kcuser"; + static final String clientId = "test"; + static final String clientSecret = "94XHrfNRwXsjqTqApRrwWmhDLDHpIYV8"; + static final String realm = "test"; + static final String realmAdminUser = "admin"; + static final String realmAdminPassword = "admin"; + + static final String adminUser = "kcadmin"; static final String adminPassword = "kcpassword"; - static final String clientIdAdminCli = "admin-cli"; - // The realm JSON resides in conf/keycloak/oidc-realm.json and gets avail here using in pom.xml + // The realm JSON resides in conf/keycloak/test-realm.json and gets avail here using in pom.xml @Container - static KeycloakContainer keycloakContainer = new KeycloakContainer("quay.io/keycloak/keycloak:19.0") - .withRealmImportFile("keycloak/oidc-realm.json") + static KeycloakContainer keycloakContainer = new KeycloakContainer("quay.io/keycloak/keycloak:21.0") + .withRealmImportFile("keycloak/test-realm.json") .withAdminUsername(adminUser) .withAdminPassword(adminPassword); @@ -76,31 +81,44 @@ OIDCAuthProvider getProvider() throws Exception { return oidcAuthProvider; } - Keycloak getAdminClient() { - return KeycloakBuilder.builder() + // NOTE: This requires the "direct access grants" for the client to be enabled! 
+ String getBearerTokenViaKeycloakAdminClient() throws Exception { + try (Keycloak keycloak = KeycloakBuilder.builder() .serverUrl(keycloakContainer.getAuthServerUrl()) + .grantType(OAuth2Constants.PASSWORD) .realm(realm) - .clientId(clientIdAdminCli) - .username(keycloakContainer.getAdminUsername()) - .password(keycloakContainer.getAdminPassword()) - .build(); - } - - String getBearerToken() throws Exception { - Keycloak keycloak = getAdminClient(); - return keycloak.tokenManager().getAccessTokenString(); + .clientId(clientId) + .clientSecret(clientSecret) + .username(realmAdminUser) + .password(realmAdminPassword) + .scope("openid") + .build()) { + return keycloak.tokenManager().getAccessTokenString(); + } } + /** + * This basic test covers configuring an OIDC provider via MPCONFIG and being able to use it. + */ @Test void testCreateProvider() throws Exception { + // given OIDCAuthProvider oidcAuthProvider = getProvider(); - String token = getBearerToken(); + String token = getBearerTokenViaKeycloakAdminClient(); assumeFalse(token == null); - Optional info = oidcAuthProvider.getUserInfo(new BearerAccessToken(token)); + Optional info = Optional.empty(); + + // when + try { + info = oidcAuthProvider.getUserInfo(new BearerAccessToken(token)); + } catch (OAuth2Exception e) { + System.out.println(e.getMessageBody()); + } + //then assertTrue(info.isPresent()); - assertEquals(adminUser, info.get().getPreferredUsername()); + assertEquals(realmAdminUser, info.get().getPreferredUsername()); } @Mock @@ -111,6 +129,11 @@ void testCreateProvider() throws Exception { @InjectMocks BearerTokenAuthMechanism bearerTokenAuthMechanism; + /** + * This test covers using an OIDC provider as authorization party when accessing the Dataverse API with a + * Bearer Token. See {@link BearerTokenAuthMechanism}. It needs to mock the auth services to avoid adding + * more dependencies. 
+ */ @Test @JvmSetting(key = JvmSettings.FEATURE_FLAG, varArgs = "api-bearer-auth", value = "true") void testApiBearerAuth() throws Exception { @@ -120,7 +143,7 @@ void testApiBearerAuth() throws Exception { // given // Get the access token from the remote Keycloak in the container - String accessToken = getBearerToken(); + String accessToken = getBearerTokenViaKeycloakAdminClient(); assumeFalse(accessToken == null); OIDCAuthProvider oidcAuthProvider = getProvider(); From 844339dd6adc74977a6011f58cc01dd924b002cc Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Mon, 22 May 2023 16:14:52 +0200 Subject: [PATCH 089/396] test(oidc): add third OIDC integration test for JSF login Using HtmlUnit as a browser replacement, enable testing if the Authorization Code Flow used when logging in via the UI works properly. --- pom.xml | 6 ++ .../OIDCAuthenticationProviderFactoryIT.java | 78 +++++++++++++++++++ 2 files changed, 84 insertions(+) diff --git a/pom.xml b/pom.xml index e9a9b9dd611..2ba01e5ab61 100644 --- a/pom.xml +++ b/pom.xml @@ -610,6 +610,12 @@ ${smallrye-mpconfig.version} test + + org.htmlunit + htmlunit + 3.2.0 + test + diff --git a/src/test/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/oidc/OIDCAuthenticationProviderFactoryIT.java b/src/test/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/oidc/OIDCAuthenticationProviderFactoryIT.java index 88f70c53948..2c963e8df46 100644 --- a/src/test/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/oidc/OIDCAuthenticationProviderFactoryIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/oidc/OIDCAuthenticationProviderFactoryIT.java @@ -16,6 +16,13 @@ import edu.harvard.iq.dataverse.settings.JvmSettings; import edu.harvard.iq.dataverse.util.testing.JvmSetting; import edu.harvard.iq.dataverse.util.testing.LocalJvmSettings; +import org.htmlunit.FailingHttpStatusCodeException; +import org.htmlunit.WebClient; +import org.htmlunit.WebResponse; +import 
org.htmlunit.html.HtmlForm; +import org.htmlunit.html.HtmlInput; +import org.htmlunit.html.HtmlPage; +import org.htmlunit.html.HtmlSubmitInput; import org.junit.jupiter.api.Tag; import org.junit.jupiter.api.Test; import org.junit.jupiter.api.extension.ExtendWith; @@ -28,8 +35,11 @@ import org.testcontainers.junit.jupiter.Container; import org.testcontainers.junit.jupiter.Testcontainers; +import java.util.Map; import java.util.Optional; import java.util.Set; +import java.util.regex.Pattern; +import java.util.stream.Collectors; import static edu.harvard.iq.dataverse.authorization.providers.oauth2.oidc.OIDCAuthenticationProviderFactoryIT.clientId; import static edu.harvard.iq.dataverse.authorization.providers.oauth2.oidc.OIDCAuthenticationProviderFactoryIT.clientSecret; @@ -166,4 +176,72 @@ void testApiBearerAuth() throws Exception { assertNotNull(lookedUpUser); assertEquals(user, lookedUpUser); } + + /** + * This test covers the {@link OIDCAuthProvider#buildAuthzUrl(String, String)} and + * {@link OIDCAuthProvider#getUserRecord(String, String, String)} methods that are used when + * a user authenticates via the JSF UI. It covers enabling PKCE, which is no hard requirement + * by the protocol, but might be required by some provider (as seen with Microsoft Azure AD). + * As we don't have a real browser, we use {@link WebClient} from HtmlUnit as a replacement. 
+ */ + @Test + @JvmSetting(key = JvmSettings.OIDC_PKCE_ENABLED, value = "true") + void testAuthorizationCodeFlowWithPKCE() throws Exception { + // given + String state = "foobar"; + String callbackUrl = "http://localhost:8080/oauth2callback.xhtml"; + + OIDCAuthProvider oidcAuthProvider = getProvider(); + String authzUrl = oidcAuthProvider.buildAuthzUrl(state, callbackUrl); + //System.out.println(authzUrl); + + try (WebClient webClient = new WebClient()) { + webClient.getOptions().setCssEnabled(false); + webClient.getOptions().setJavaScriptEnabled(false); + // We *want* to know about the redirect, as it contains the data we need! + webClient.getOptions().setRedirectEnabled(false); + + HtmlPage loginPage = webClient.getPage(authzUrl); + assumeTrue(loginPage.getTitleText().contains("Sign in to " + realm)); + + HtmlForm form = loginPage.getForms().get(0); + HtmlInput username = form.getInputByName("username"); + HtmlInput password = form.getInputByName("password"); + HtmlSubmitInput submit = form.getInputByName("login"); + + username.type(realmAdminUser); + password.type(realmAdminPassword); + + FailingHttpStatusCodeException exception = assertThrows(FailingHttpStatusCodeException.class, submit::click); + assertEquals(302, exception.getStatusCode()); + + WebResponse response = exception.getResponse(); + assertNotNull(response); + + String callbackLocation = response.getResponseHeaderValue("Location"); + assertTrue(callbackLocation.startsWith(callbackUrl)); + //System.out.println(callbackLocation); + + String queryPart = callbackLocation.trim().split("\\?")[1]; + Map parameters = Pattern.compile("\\s*&\\s*") + .splitAsStream(queryPart) + .map(s -> s.split("=", 2)) + .collect(Collectors.toMap(a -> a[0], a -> a.length > 1 ? 
a[1]: "")); + //System.out.println(map); + assertTrue(parameters.containsKey("code")); + assertTrue(parameters.containsKey("state")); + + OAuth2UserRecord userRecord = oidcAuthProvider.getUserRecord( + parameters.get("code"), + parameters.get("state"), + callbackUrl + ); + + assertNotNull(userRecord); + assertEquals(realmAdminUser, userRecord.getUsername()); + } catch (OAuth2Exception e) { + System.out.println(e.getMessageBody()); + throw e; + } + } } \ No newline at end of file From 033d8e9f58cd39d8a697354facffc0d76abf59bd Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Tue, 23 May 2023 23:12:00 +0200 Subject: [PATCH 090/396] chore(build): update many Maven plugins to latest available versions This is mostly because with Maven 3.9 it starts to verify if a plugin is going to be compatible with Maven 4. Most plugins are being updated at the moment. --- modules/dataverse-parent/pom.xml | 28 ++++++++++++++++++++-------- pom.xml | 2 -- 2 files changed, 20 insertions(+), 10 deletions(-) diff --git a/modules/dataverse-parent/pom.xml b/modules/dataverse-parent/pom.xml index 060fc22b4d2..97e83684d1a 100644 --- a/modules/dataverse-parent/pom.xml +++ b/modules/dataverse-parent/pom.xml @@ -174,19 +174,21 @@ 5.7.0 ${junit.jupiter.version} 2.28.2 + 0.8.10 9.3 - 3.8.1 - 3.2.2 + 3.11.0 + 3.3.0 3.3.2 - 3.2.0 - 3.0.0-M1 - 3.0.0-M5 - 3.0.0-M5 - 3.3.0 - 3.1.2 + 3.5.0 + 3.1.1 + 3.1.0 + 3.1.0 + 3.6.0 + 3.3.1 + 3.2.2 0.42.1 @@ -245,6 +247,11 @@ maven-failsafe-plugin ${maven-failsafe-plugin.version} + + org.apache.maven.plugins + maven-resources-plugin + ${maven-resources-plugin.version} + org.apache.maven.plugins maven-checkstyle-plugin @@ -257,6 +264,11 @@ + + org.jacoco + jacoco-maven-plugin + ${maven-jacoco-plugin.version} + io.fabric8 docker-maven-plugin diff --git a/pom.xml b/pom.xml index 2ba01e5ab61..28fc2b25af5 100644 --- a/pom.xml +++ b/pom.xml @@ -26,7 +26,6 @@ 1.2.18.4 8.5.10 1.20.1 - 0.8.7 5.2.1 2.4.1 5.5.3 @@ -704,7 +703,6 @@ org.jacoco jacoco-maven-plugin - 
${jacoco.version} ${basedir}/target/coverage-reports/jacoco-unit.exec ${basedir}/target/coverage-reports/jacoco-unit.exec From f112fe46e93133e7736bfcb018623b07eebee776 Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Wed, 24 May 2023 00:05:01 +0200 Subject: [PATCH 091/396] build,test: reconfigure JaCoCo test coverage measuring With the addition of integration tests we need to create a combined report of unit tests and integration tests (for now still keeping API tests out of the loop for normal cases). This commit reconfigures the JaCoCo plugin with multiple executions at certain times to execute the measurements and merge the results together. --- pom.xml | 72 ++++++++++++++++++++++++++++++++++++++++++++++++++++----- 1 file changed, 66 insertions(+), 6 deletions(-) diff --git a/pom.xml b/pom.xml index 28fc2b25af5..5d1523e01b8 100644 --- a/pom.xml +++ b/pom.xml @@ -703,23 +703,78 @@ org.jacoco jacoco-maven-plugin - - ${basedir}/target/coverage-reports/jacoco-unit.exec - ${basedir}/target/coverage-reports/jacoco-unit.exec - jacoco-initialize prepare-agent + + ${project.build.directory}/coverage-reports/jacoco-unit.exec + surefire.jacoco.args + - jacoco-site - package + jacoco-after-unit + test report + + ${project.build.directory}/coverage-reports/jacoco-unit.exec + ${project.reporting.outputDirectory}/jacoco-unit-test-coverage-report + + + + jacoco-initialize-it + pre-integration-test + + prepare-agent + + + ${project.build.directory}/coverage-reports/jacoco-integration.exec + failsafe.jacoco.args + + + + jacoco-after-it + post-integration-test + + report + + + ${project.build.directory}/coverage-reports/jacoco-integration.exec + ${project.reporting.outputDirectory}/jacoco-integration-test-coverage-report + + + + jacoco-merge-unit-and-it + post-integration-test + + merge + + + + + ${project.build.directory}/coverage-reports/ + + *.exec + + + + ${project.build.directory}/coverage-reports/merged.exec + + + + jacoco-report + post-integration-test + + report + + + 
${project.build.directory}/coverage-reports/merged.exec + ${project.reporting.outputDirectory}/jacoco-merged-test-coverage-report + @@ -734,6 +789,9 @@ 2.3.1 + + ${project.reporting.outputDirectory}/jacoco-merged-test-coverage-report/jacoco.xml + org.apache.maven.plugins @@ -743,6 +801,7 @@ ${testsToExclude} ${skipUnitTests} + ${surefire.jacoco.args} ${argLine} @@ -751,6 +810,7 @@ maven-failsafe-plugin testcontainers + ${failsafe.jacoco.args} ${argLine} From 5975e268ef1150ef76edd589e1ce35a74e683ccf Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Wed, 24 May 2023 00:23:54 +0200 Subject: [PATCH 092/396] ci: restructure Maven tests to include integration tests --- .github/workflows/maven_unit_test.yml | 78 +++++++++++++++++++++++++-- 1 file changed, 75 insertions(+), 3 deletions(-) diff --git a/.github/workflows/maven_unit_test.yml b/.github/workflows/maven_unit_test.yml index 629a45a6c2c..8d51702be6b 100644 --- a/.github/workflows/maven_unit_test.yml +++ b/.github/workflows/maven_unit_test.yml @@ -33,6 +33,7 @@ jobs: continue-on-error: ${{ matrix.experimental }} runs-on: ubuntu-latest steps: + # Basic setup chores - uses: actions/checkout@v2 - name: Set up JDK ${{ matrix.jdk }} uses: actions/setup-java@v2 @@ -45,13 +46,84 @@ jobs: path: ~/.m2 key: ${{ runner.os }}-m2-${{ hashFiles('**/pom.xml') }} restore-keys: ${{ runner.os }}-m2 + - name: Build with Maven run: mvn -DcompilerArgument=-Xlint:unchecked -Dtarget.java.version=${{ matrix.jdk }} -P all-unit-tests clean test - - name: Maven Code Coverage - env: + + # Store the build for the next step (integration test) to avoid recompilation and to transfer coverage reports + - run: tar -cvf java-builddir.tar target + - uses: actions/upload-artifact@v3 + with: + name: java-builddir + path: java-builddir.tar + retention-days: 3 + integration-test: + runs-on: ubuntu-latest + needs: unittest + steps: + # Basic setup chores + - uses: actions/checkout@v3 + - name: Set up JDK ${{ matrix.jdk }} + uses: actions/setup-java@v2 + 
with: + java-version: ${{ matrix.jdk }} + distribution: 'adopt' + - name: Cache Maven packages + uses: actions/cache@v2 + with: + path: ~/.m2 + key: ${{ runner.os }}-m2-${{ hashFiles('**/pom.xml') }} + restore-keys: ${{ runner.os }}-m2 + + # Get the build output from the unit test job + - uses: actions/download-artifact@v3 + with: + name: java-builddir + - run: tar -xvf java-builddir.tar; ls -laR target + + # Run integration tests + - run: mvn -Dtarget.java.version=${{ matrix.jdk }} verify + + # Wrap up and send to coverage job + - run: tar -cvf java-builddir.tar target + - uses: actions/upload-artifact@v3 + with: + name: java-builddir + path: java-builddir.tar + retention-days: 3 + coverage-report: + runs-on: ubuntu-latest + needs: integration-test + steps: + # Basic setup chores + - uses: actions/checkout@v3 + - name: Set up JDK ${{ matrix.jdk }} + uses: actions/setup-java@v2 + with: + java-version: ${{ matrix.jdk }} + distribution: 'adopt' + - name: Cache Maven packages + uses: actions/cache@v2 + with: + path: ~/.m2 + key: ${{ runner.os }}-m2-${{ hashFiles('**/pom.xml') }} + restore-keys: ${{ runner.os }}-m2 + + # Get the build output from the integration test job + - uses: actions/download-artifact@v3 + with: + name: java-builddir + - run: tar -xvf java-builddir.tar; ls -laR target + + # Deposit Code Coverage + - name: Maven Code Coverage + env: CI_NAME: github COVERALLS_SECRET: ${{ secrets.GITHUB_TOKEN }} - run: mvn -V -B jacoco:report coveralls:report -DrepoToken=${COVERALLS_SECRET} -DpullRequest=${{ github.event.number }} + run: mvn -V -B coveralls:report -DrepoToken=${COVERALLS_SECRET} -DpullRequest=${{ github.event.number }} + + # NOTE: this may be extended with adding a report to the build output, leave a comment, send to Sonarcloud, ... 
+ push-app-img: name: Publish App Image permissions: From 3b7aa106d118f7cecaefd7680b7a6dd7df2fc670 Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Wed, 24 May 2023 00:29:47 +0200 Subject: [PATCH 093/396] chore(ci): update actions/setup-java to v3 for Maven workflow --- .github/workflows/maven_unit_test.yml | 11 +++++------ 1 file changed, 5 insertions(+), 6 deletions(-) diff --git a/.github/workflows/maven_unit_test.yml b/.github/workflows/maven_unit_test.yml index 8d51702be6b..1ff08705e36 100644 --- a/.github/workflows/maven_unit_test.yml +++ b/.github/workflows/maven_unit_test.yml @@ -34,9 +34,9 @@ jobs: runs-on: ubuntu-latest steps: # Basic setup chores - - uses: actions/checkout@v2 + - uses: actions/checkout@v3 - name: Set up JDK ${{ matrix.jdk }} - uses: actions/setup-java@v2 + uses: actions/setup-java@v3 with: java-version: ${{ matrix.jdk }} distribution: 'adopt' @@ -64,7 +64,7 @@ jobs: # Basic setup chores - uses: actions/checkout@v3 - name: Set up JDK ${{ matrix.jdk }} - uses: actions/setup-java@v2 + uses: actions/setup-java@v3 with: java-version: ${{ matrix.jdk }} distribution: 'adopt' @@ -97,10 +97,9 @@ jobs: steps: # Basic setup chores - uses: actions/checkout@v3 - - name: Set up JDK ${{ matrix.jdk }} - uses: actions/setup-java@v2 + - uses: actions/setup-java@v3 with: - java-version: ${{ matrix.jdk }} + java-version: '11' distribution: 'adopt' - name: Cache Maven packages uses: actions/cache@v2 From 2c0a6aabf5852a441009aca39625e8a71eed66b9 Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Wed, 24 May 2023 00:30:13 +0200 Subject: [PATCH 094/396] ci(test): fix missing build matrix for integration test job --- .github/workflows/maven_unit_test.yml | 17 +++++++++++++++++ 1 file changed, 17 insertions(+) diff --git a/.github/workflows/maven_unit_test.yml b/.github/workflows/maven_unit_test.yml index 1ff08705e36..3c1c5e7e3e2 100644 --- a/.github/workflows/maven_unit_test.yml +++ b/.github/workflows/maven_unit_test.yml @@ -60,6 +60,23 @@ jobs: 
integration-test: runs-on: ubuntu-latest needs: unittest + name: (${{ matrix.status}} / JDK ${{ matrix.jdk }}) Integration Tests + strategy: + fail-fast: false + matrix: + jdk: [ '11' ] + experimental: [ false ] + status: [ "Stable" ] + # + # JDK 17 builds disabled due to non-essential fails marking CI jobs as completely failed within + # Github Projects, PR lists etc. This was consensus on Slack #dv-tech. See issue #8094 + # (This is a limitation of how Github is currently handling these things.) + # + #include: + # - jdk: '17' + # experimental: true + # status: "Experimental" + continue-on-error: ${{ matrix.experimental }} steps: # Basic setup chores - uses: actions/checkout@v3 From 8a6f23166870c9460c635535cb021029d45509f5 Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Wed, 24 May 2023 00:43:36 +0200 Subject: [PATCH 095/396] ci(test): tweak artifact upload and job names --- .github/workflows/maven_unit_test.yml | 19 ++++++++++--------- 1 file changed, 10 insertions(+), 9 deletions(-) diff --git a/.github/workflows/maven_unit_test.yml b/.github/workflows/maven_unit_test.yml index 3c1c5e7e3e2..a4753f24668 100644 --- a/.github/workflows/maven_unit_test.yml +++ b/.github/workflows/maven_unit_test.yml @@ -1,4 +1,4 @@ -name: Maven Unit Tests +name: Maven Tests on: push: @@ -96,21 +96,22 @@ jobs: - uses: actions/download-artifact@v3 with: name: java-builddir - - run: tar -xvf java-builddir.tar; ls -laR target + - run: tar -xvf java-builddir.tar - # Run integration tests - - run: mvn -Dtarget.java.version=${{ matrix.jdk }} verify + # Run integration tests (but not unit tests again) + - run: mvn -DskipUnitTests -Dtarget.java.version=${{ matrix.jdk }} verify # Wrap up and send to coverage job - - run: tar -cvf java-builddir.tar target + - run: tar -cvf java-reportdir.tar target/site - uses: actions/upload-artifact@v3 with: - name: java-builddir - path: java-builddir.tar + name: java-reportdir + path: java-reportdir.tar retention-days: 3 coverage-report: runs-on: 
ubuntu-latest needs: integration-test + name: Coverage Report Submission steps: # Basic setup chores - uses: actions/checkout@v3 @@ -128,8 +129,8 @@ jobs: # Get the build output from the integration test job - uses: actions/download-artifact@v3 with: - name: java-builddir - - run: tar -xvf java-builddir.tar; ls -laR target + name: java-reportdir + - run: tar -xvf java-reportdir.tar # Deposit Code Coverage - name: Maven Code Coverage From edef7d5bd52f3625b2e15521046d97a73f999894 Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Wed, 24 May 2023 01:20:04 +0200 Subject: [PATCH 096/396] docs(config): add OIDC to JVM options list with backlink --- doc/sphinx-guides/source/installation/config.rst | 7 +++++++ doc/sphinx-guides/source/installation/oidc.rst | 2 ++ 2 files changed, 9 insertions(+) diff --git a/doc/sphinx-guides/source/installation/config.rst b/doc/sphinx-guides/source/installation/config.rst index f8aef8c59da..4b1a8bd14b3 100644 --- a/doc/sphinx-guides/source/installation/config.rst +++ b/doc/sphinx-guides/source/installation/config.rst @@ -2420,6 +2420,13 @@ Can also be set via any `supported MicroProfile Config API source`_, e.g. the en ``DATAVERSE_UI_SHOW_VALIDITY_FILTER``. Will accept ``[tT][rR][uU][eE]|1|[oO][nN]`` as "true" expressions. +dataverse.auth.oidc.* ++++++++++++++++++++++ + +Provision a single :doc:`OpenID Connect authentication provider ` using MicroProfile Config. You can find a list of +all available options at :ref:`oidc-mpconfig`. + + .. 
_feature-flags: Feature Flags diff --git a/doc/sphinx-guides/source/installation/oidc.rst b/doc/sphinx-guides/source/installation/oidc.rst index 9848d73b189..0dc5ca4ff4a 100644 --- a/doc/sphinx-guides/source/installation/oidc.rst +++ b/doc/sphinx-guides/source/installation/oidc.rst @@ -116,6 +116,8 @@ The Dataverse installation will automatically try to load the provider and retri You should see the new provider under "Other options" on the Log In page, as described in the :doc:`/user/account` section of the User Guide. +.. _oidc-mpconfig: + Provision via MPCONFIG ^^^^^^^^^^^^^^^^^^^^^^ From e91a046a790506e959ffcc9f71d27f6207b4d4e1 Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Wed, 24 May 2023 01:51:04 +0200 Subject: [PATCH 097/396] feat(oidc): replace map-based cache of PKCE verifiers Instead of using an unlimited growth Map of verifiers as a cache, we now will use a real evicting cache implementation to limit the size and age of entries. This will limit resource waste and mitigate an unlikely but present attack vector of pumping up the cache to DDoS us. 
--- pom.xml | 7 +++++++ .../providers/oauth2/oidc/OIDCAuthProvider.java | 14 ++++++++++++-- .../harvard/iq/dataverse/settings/JvmSettings.java | 2 ++ .../META-INF/microprofile-config.properties | 4 ++++ 4 files changed, 25 insertions(+), 2 deletions(-) diff --git a/pom.xml b/pom.xml index 5d1523e01b8..0484a80ddd9 100644 --- a/pom.xml +++ b/pom.xml @@ -386,6 +386,13 @@ oauth2-oidc-sdk 10.9.1 + + + com.github.ben-manes.caffeine + caffeine + 3.1.6 + + io.gdcc diff --git a/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/oidc/OIDCAuthProvider.java b/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/oidc/OIDCAuthProvider.java index 52362f7abeb..818332ea282 100644 --- a/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/oidc/OIDCAuthProvider.java +++ b/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/oidc/OIDCAuthProvider.java @@ -1,5 +1,7 @@ package edu.harvard.iq.dataverse.authorization.providers.oauth2.oidc; +import com.github.benmanes.caffeine.cache.Cache; +import com.github.benmanes.caffeine.cache.Caffeine; import com.github.scribejava.core.builder.api.DefaultApi20; import com.nimbusds.oauth2.sdk.AuthorizationCode; import com.nimbusds.oauth2.sdk.AuthorizationCodeGrant; @@ -36,10 +38,13 @@ import edu.harvard.iq.dataverse.authorization.providers.oauth2.AbstractOAuth2AuthenticationProvider; import edu.harvard.iq.dataverse.authorization.providers.oauth2.OAuth2Exception; import edu.harvard.iq.dataverse.authorization.providers.oauth2.OAuth2UserRecord; +import edu.harvard.iq.dataverse.settings.JvmSettings; import edu.harvard.iq.dataverse.util.BundleUtil; import java.io.IOException; import java.net.URI; +import java.time.Duration; +import java.time.temporal.ChronoUnit; import java.util.Arrays; import java.util.List; import java.util.Map; @@ -69,8 +74,13 @@ public class OIDCAuthProvider extends AbstractOAuth2AuthenticationProvider { /** * Using PKCE, we create and send a special {@link 
CodeVerifier}. This contains a secret * we need again when verifying the response by the provider, thus the cache. + * To be absolutely sure this may not be abused to DDoS us and not let unused verifiers rot, + * use an evicting cache implementation and not a standard map. */ - private final Map verifierCache = new ConcurrentHashMap<>(); + private final Cache verifierCache = Caffeine.newBuilder() + .maximumSize(JvmSettings.OIDC_PKCE_CACHE_MAXSIZE.lookup(Integer.class)) + .expireAfterWrite(Duration.of(JvmSettings.OIDC_PKCE_CACHE_MAXAGE.lookup(Integer.class), ChronoUnit.SECONDS)) + .build(); public OIDCAuthProvider(String aClientId, String aClientSecret, String issuerEndpointURL, boolean pkceEnabled, String pkceMethod) throws AuthorizationSetupException { @@ -201,7 +211,7 @@ public String buildAuthzUrl(String state, String callbackUrl) { public OAuth2UserRecord getUserRecord(String code, String state, String redirectUrl) throws IOException, OAuth2Exception { // Retrieve the verifier from the cache and clear from the cache. If not found, will be null. // Will be sent to token endpoint for verification, so if required but missing, will lead to exception. 
- CodeVerifier verifier = verifierCache.remove(state); + CodeVerifier verifier = verifierCache.getIfPresent(state); // Create grant object - again, this is null-safe for the verifier AuthorizationGrant codeGrant = new AuthorizationCodeGrant( diff --git a/src/main/java/edu/harvard/iq/dataverse/settings/JvmSettings.java b/src/main/java/edu/harvard/iq/dataverse/settings/JvmSettings.java index 144be72c91a..1122b64c139 100644 --- a/src/main/java/edu/harvard/iq/dataverse/settings/JvmSettings.java +++ b/src/main/java/edu/harvard/iq/dataverse/settings/JvmSettings.java @@ -125,6 +125,8 @@ public enum JvmSettings { SCOPE_OIDC_PKCE(SCOPE_OIDC, "pkce"), OIDC_PKCE_ENABLED(SCOPE_OIDC_PKCE, "enabled"), OIDC_PKCE_METHOD(SCOPE_OIDC_PKCE, "method"), + OIDC_PKCE_CACHE_MAXSIZE(SCOPE_OIDC_PKCE, "max-cache-size"), + OIDC_PKCE_CACHE_MAXAGE(SCOPE_OIDC_PKCE, "max-cache-age"), // UI SETTINGS SCOPE_UI(PREFIX, "ui"), diff --git a/src/main/resources/META-INF/microprofile-config.properties b/src/main/resources/META-INF/microprofile-config.properties index 3e166d0527f..38a4d8df0ab 100644 --- a/src/main/resources/META-INF/microprofile-config.properties +++ b/src/main/resources/META-INF/microprofile-config.properties @@ -54,3 +54,7 @@ dataverse.pid.datacite.rest-api-url=https://api.test.datacite.org # Handle.Net dataverse.pid.handlenet.index=300 + +# AUTHENTICATION +dataverse.auth.oidc.pkce.max-cache-size=10000 +dataverse.auth.oidc.pkce.max-cache-age=300 \ No newline at end of file From 8d4a75e8236298d787fdf738512c615c13c3654e Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Wed, 24 May 2023 01:51:39 +0200 Subject: [PATCH 098/396] docs(oidc): describe new config options for PKCE verifier cache --- doc/sphinx-guides/source/installation/oidc.rst | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/doc/sphinx-guides/source/installation/oidc.rst b/doc/sphinx-guides/source/installation/oidc.rst index 0dc5ca4ff4a..e036e9c8470 100644 --- a/doc/sphinx-guides/source/installation/oidc.rst +++ 
b/doc/sphinx-guides/source/installation/oidc.rst @@ -168,3 +168,12 @@ The following options are available: - A subtitle, currently not displayed by the UI. - N - ``OpenID Connect`` + * - ``dataverse.auth.oidc.pkce.max-cache-size`` + - Tune the maximum size of all OIDC providers' verifier cache (= number of outstanding PKCE-enabled auth responses). + - N + - 10000 + * - ``dataverse.auth.oidc.pkce.max-cache-age`` + - Tune the maximum age of all OIDC providers' verifier cache entries. Default is 5 minutes, equivalent to lifetime + of many OIDC access tokens. + - N + - 300 \ No newline at end of file From 8ca25fff4f4e1801c97ff28ba1947e4d05bfc915 Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Wed, 24 May 2023 01:56:47 +0200 Subject: [PATCH 099/396] docs(oidc,test): add release note --- .../9268-8349-oidc-improvements.md | 28 +++++++++++++++++++ 1 file changed, 28 insertions(+) create mode 100644 doc/release-notes/9268-8349-oidc-improvements.md diff --git a/doc/release-notes/9268-8349-oidc-improvements.md b/doc/release-notes/9268-8349-oidc-improvements.md new file mode 100644 index 00000000000..cb0a9685c69 --- /dev/null +++ b/doc/release-notes/9268-8349-oidc-improvements.md @@ -0,0 +1,28 @@ +## OpenID Connect Authentication Provider Improvements + +### Using MicroProfile Config For Provisioning + +With this release it is possible to provision a single OIDC-based authentication provider +by using MicroProfile Config instead of or in addition to the classic Admin API provisioning. + +If you are using an external OIDC provider component as identity management system and/or broker +to other authentication providers such as Google, eduGain SAML and so on, this might make your +life easier during instance setups and reconfiguration. You no longer need to generate the +necessary JSON file. + +### Adding PKCE Support + +Some OIDC providers require using PKCE as additional security layer. 
As of this version, you can enable +support for this on any OIDC provider you configure. (Note that OAuth2 providers have not been upgraded.) + +## Improved Testing + +With this release, we add a new type of testing to Dataverse: integration tests which are not end-to-end tests +like our API tests. Starting with OIDC authentication support, we test regularly on CI for working condition +of both OIDC login options in UI and API. + +The testing and development Keycloak realm has been updated with more users and compatibility with Keycloak 21. + +The support for setting JVM options during testing has been improved for developers. You now may add the +`@JvmSetting` annotation to classes (also inner classes) and reference factory methods for values. This improvement is +also paving the way to enable manipulating JVM options during end-to-end tests on remote ends. From 8b2937e80e53528106d12ea7e8ed7204c7b7ee2c Mon Sep 17 00:00:00 2001 From: qqmyers Date: Thu, 25 May 2023 17:35:23 -0400 Subject: [PATCH 100/396] Add call to populate file lists --- src/main/java/edu/harvard/iq/dataverse/DatasetPage.java | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java b/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java index b8d2507bc6b..c857d00a27d 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java @@ -370,6 +370,7 @@ public void setShowIngestSuccess(boolean showIngestSuccess) { public void setTermsGuestbookPopupAction(String popupAction){ if(popupAction != null && popupAction.length() > 0){ + logger.info("TGPA set to " + popupAction); this.termsGuestbookPopupAction = popupAction; } @@ -5178,6 +5179,9 @@ public boolean isFileAccessRequestMultiButtonEnabled(){ if (!isSessionUserAuthenticated() || !dataset.isFileAccessRequest()){ return false; } + //populate file lists + filterSelectedFiles(); + if( this.selectedRestrictedFiles == null ||
this.selectedRestrictedFiles.isEmpty() ){ return false; } From 7ddc0466a767b292e8dfe6625feea770b112ded5 Mon Sep 17 00:00:00 2001 From: qqmyers Date: Thu, 25 May 2023 17:35:42 -0400 Subject: [PATCH 101/396] hide accept terms buttons in download case --- src/main/webapp/guestbook-terms-popup-fragment.xhtml | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/src/main/webapp/guestbook-terms-popup-fragment.xhtml b/src/main/webapp/guestbook-terms-popup-fragment.xhtml index 5616fa48280..bdaa6d92432 100644 --- a/src/main/webapp/guestbook-terms-popup-fragment.xhtml +++ b/src/main/webapp/guestbook-terms-popup-fragment.xhtml @@ -41,7 +41,7 @@
    -
    +
    @@ -51,6 +51,7 @@
    + Date: Thu, 25 May 2023 17:49:17 -0400 Subject: [PATCH 102/396] handle changes from #6919 --- src/main/webapp/guestbook-terms-popup-fragment.xhtml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/main/webapp/guestbook-terms-popup-fragment.xhtml b/src/main/webapp/guestbook-terms-popup-fragment.xhtml index bdaa6d92432..9c3391ef9ae 100644 --- a/src/main/webapp/guestbook-terms-popup-fragment.xhtml +++ b/src/main/webapp/guestbook-terms-popup-fragment.xhtml @@ -60,7 +60,7 @@ From 90186edc47772a2aa39089dc1607caecf3c3917a Mon Sep 17 00:00:00 2001 From: qqmyers Date: Thu, 25 May 2023 19:47:11 -0400 Subject: [PATCH 103/396] fix query --- .../edu/harvard/iq/dataverse/DataFileServiceBean.java | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/DataFileServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DataFileServiceBean.java index 449e8d351c6..f4a33e4f12f 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DataFileServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/DataFileServiceBean.java @@ -861,10 +861,11 @@ private List retrieveFileAccessRequesters(DataFile fileIn) { // List requesters = em.createNativeQuery("select authenticated_user_id // from fileaccessrequests where datafile_id = // "+fileIn.getId()).getResultList(); - List requesters = em.createNativeQuery("select authenticated_user_id from fileaccessrequests where datafile_id = " + fileIn.getId() + " and request_state='CREATED'").getResultList(); - - for (Object userIdObj : requesters) { - Long userId = (Long) userIdObj; + TypedQuery typedQuery = em.createQuery("select f.user.id from FileAccessRequest f where f.dataFile.id = :file_id and f.requestState= :requestState", Long.class); + typedQuery.setParameter("file_id", fileIn.getId()); + typedQuery.setParameter("requestState", FileAccessRequest.RequestState.CREATED); + List requesters = typedQuery.getResultList(); + for (Long userId : requesters) { 
AuthenticatedUser user = userService.find(userId); if (user != null) { retList.add(user); From 728c886444161cd23ad060ab8665961f4452ace6 Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Tue, 6 Jun 2023 01:16:08 +0200 Subject: [PATCH 104/396] ci: add installed gdcc packages to Maven run artifact --- .github/workflows/maven_unit_test.yml | 18 ++++++++++++------ 1 file changed, 12 insertions(+), 6 deletions(-) diff --git a/.github/workflows/maven_unit_test.yml b/.github/workflows/maven_unit_test.yml index 51e3c5188f6..cc918e16d97 100644 --- a/.github/workflows/maven_unit_test.yml +++ b/.github/workflows/maven_unit_test.yml @@ -60,11 +60,15 @@ jobs: - run: rm -rf ~/.m2/repository/edu/harvard/iq/dataverse # Store the build for the next step (integration test) to avoid recompilation and to transfer coverage reports - - run: tar -cvf java-builddir.tar target + - run: | + tar -cvf java-builddir.tar target + tar -cvf java-m2-selection.tar ~/.m2/repository/io/gdcc/dataverse-* - uses: actions/upload-artifact@v3 with: - name: java-builddir - path: java-builddir.tar + name: java-artifacts + path: | + java-builddir.tar + java-m2-selection.tar retention-days: 3 integration-test: @@ -100,11 +104,12 @@ jobs: # Get the build output from the unit test job - uses: actions/download-artifact@v3 with: - name: java-builddir - - run: tar -xvf java-builddir.tar + name: java-artifacts + - run: | + tar -xvf java-builddir.tar + tar -xvf java-m2-selection.tar -C / # Run integration tests (but not unit tests again) - # TODO - adopt to parent module - run: mvn -DskipUnitTests -Dtarget.java.version=${{ matrix.jdk }} verify # Wrap up and send to coverage job @@ -148,6 +153,7 @@ jobs: jacoco:report coveralls:report # NOTE: this may be extended with adding a report to the build output, leave a comment, send to Sonarcloud, ... 
+ push-app-img: name: Publish App Image permissions: From 77242777e2afa68335884d9eed2f68e1be750102 Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Tue, 6 Jun 2023 08:39:03 +0200 Subject: [PATCH 105/396] ci: some more fixes for Maven Tests --- .github/workflows/maven_unit_test.yml | 13 +++++++------ 1 file changed, 7 insertions(+), 6 deletions(-) diff --git a/.github/workflows/maven_unit_test.yml b/.github/workflows/maven_unit_test.yml index cc918e16d97..5a5e55f82aa 100644 --- a/.github/workflows/maven_unit_test.yml +++ b/.github/workflows/maven_unit_test.yml @@ -47,12 +47,14 @@ jobs: cache: maven # The reason why we use "install" here is that we want the submodules to be available in the next step. - # Also, we can cache them this way for jobs triggered by this one. - - name: Build with Maven + # Also, we can cache them this way for jobs triggered by this one. We need to skip ITs here, as we run + # them in the next job - but install usually runs through verify phase. + - name: Build with Maven and run unit tests run: > mvn -B -f modules/dataverse-parent -Dtarget.java.version=${{ matrix.jdk }} -DcompilerArgument=-Xlint:unchecked -P all-unit-tests + -DskipIntegrationTests -pl edu.harvard.iq:dataverse -am install @@ -127,10 +129,9 @@ jobs: steps: # Basic setup chores - uses: actions/checkout@v3 - - name: Set up JDK ${{ matrix.jdk }} - uses: actions/setup-java@v3 + - uses: actions/setup-java@v3 with: - java-version: ${{ matrix.jdk }} + java-version: '11' distribution: temurin cache: maven @@ -141,7 +142,7 @@ jobs: - run: tar -xvf java-reportdir.tar # Deposit Code Coverage - - name: Maven Code Coverage + - name: Deposit Code Coverage env: CI_NAME: github COVERALLS_SECRET: ${{ secrets.GITHUB_TOKEN }} From 0287e037e6b76be83006c8447a7007d2386aea91 Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Tue, 6 Jun 2023 08:46:48 +0200 Subject: [PATCH 106/396] build: add config to enable skipping jacoco and failsafe tests --- pom.xml | 5 +++++ 1 file changed, 5 insertions(+) 
diff --git a/pom.xml b/pom.xml index fd92ccdc06e..2aa74657422 100644 --- a/pom.xml +++ b/pom.xml @@ -747,6 +747,7 @@ ${project.build.directory}/coverage-reports/jacoco-integration.exec failsafe.jacoco.args + ${skipIntegrationTests} @@ -758,6 +759,7 @@ ${project.build.directory}/coverage-reports/jacoco-integration.exec ${project.reporting.outputDirectory}/jacoco-integration-test-coverage-report + ${skipIntegrationTests} @@ -776,6 +778,7 @@ ${project.build.directory}/coverage-reports/merged.exec + ${skipIntegrationTests} @@ -787,6 +790,7 @@ ${project.build.directory}/coverage-reports/merged.exec ${project.reporting.outputDirectory}/jacoco-merged-test-coverage-report + ${skipIntegrationTests} @@ -824,6 +828,7 @@ testcontainers ${failsafe.jacoco.args} ${argLine} + ${skipIntegrationTests} From 28ddc3797b60eafb2b4144bb06610dc1caadf54f Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Tue, 6 Jun 2023 11:08:25 +0200 Subject: [PATCH 107/396] fix(auth,oidc): do not add null verifier to cache Will throw an NPE otherwise. --- .../authorization/providers/oauth2/oidc/OIDCAuthProvider.java | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/oidc/OIDCAuthProvider.java b/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/oidc/OIDCAuthProvider.java index 818332ea282..5eb2b391eb7 100644 --- a/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/oidc/OIDCAuthProvider.java +++ b/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/oidc/OIDCAuthProvider.java @@ -191,7 +191,9 @@ public String buildAuthzUrl(String state, String callbackUrl) { // Cache the PKCE verifier, as we need the secret in it for verification later again, after the client sends us // the auth code! We use the state to cache the verifier, as the state is unique per authentication event. 
- this.verifierCache.put(state, pkceVerifier); + if (pkceVerifier != null) { + this.verifierCache.put(state, pkceVerifier); + } return req.toURI().toString(); } From 749c13be13c6c6e3c0f8bea26674e488cd62d6ab Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Tue, 6 Jun 2023 11:10:09 +0200 Subject: [PATCH 108/396] feat(ct,oidc,auth): add dev keycloak connection to compose file --- docker-compose-dev.yml | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/docker-compose-dev.yml b/docker-compose-dev.yml index e3f93b77d4a..d582a6375f9 100644 --- a/docker-compose-dev.yml +++ b/docker-compose-dev.yml @@ -13,6 +13,10 @@ services: - DATAVERSE_DB_PASSWORD=secret - DATAVERSE_DB_USER=${DATAVERSE_DB_USER} - DATAVERSE_FEATURE_API_BEARER_AUTH=1 + - DATAVERSE_AUTH_OIDC_ENABLED=1 + - DATAVERSE_AUTH_OIDC_CLIENT_ID=test + - DATAVERSE_AUTH_OIDC_CLIENT_SECRET=94XHrfNRwXsjqTqApRrwWmhDLDHpIYV8 + - DATAVERSE_AUTH_OIDC_AUTH_SERVER_URL=http://keycloak.mydomain.com:8090/realms/test ports: - "8080:8080" # HTTP (Dataverse Application) - "4848:4848" # HTTP (Payara Admin Console) From 849df5d2214a630fe70dc177c7188f09b4ae782b Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Tue, 6 Jun 2023 13:04:30 +0200 Subject: [PATCH 109/396] docs,fix(oidc): fix API auth docs example with new test realm --- doc/sphinx-guides/source/api/auth.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/sphinx-guides/source/api/auth.rst b/doc/sphinx-guides/source/api/auth.rst index bbc81b595e3..eced7afbbcf 100644 --- a/doc/sphinx-guides/source/api/auth.rst +++ b/doc/sphinx-guides/source/api/auth.rst @@ -77,6 +77,6 @@ To test if bearer tokens are working, you can try something like the following ( .. 
code-block:: bash - export TOKEN=`curl -s -X POST --location "http://keycloak.mydomain.com:8090/realms/oidc-realm/protocol/openid-connect/token" -H "Content-Type: application/x-www-form-urlencoded" -d "username=kcuser&password=kcpassword&grant_type=password&client_id=oidc-client&client_secret=ss6gE8mODCDfqesQaSG3gwUwZqZt547E" | jq '.access_token' -r | tr -d "\n"` + export TOKEN=`curl -s -X POST --location "http://keycloak.mydomain.com:8090/realms/test/protocol/openid-connect/token" -H "Content-Type: application/x-www-form-urlencoded" -d "username=user&password=user&grant_type=password&client_id=test&client_secret=94XHrfNRwXsjqTqApRrwWmhDLDHpIYV8" | jq '.access_token' -r | tr -d "\n"` curl -H "Authorization: Bearer $TOKEN" http://localhost:8080/api/users/:me From 98e5b3fbae8871ef0fecbd0550ad8fefb00e2b22 Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Wed, 21 Jun 2023 12:34:37 +0200 Subject: [PATCH 110/396] fix(ct): enable sane default for upload storage location in containers The default from microprofile-config.properties does NOT work, as the location must already be resolvable while the servlet is being initialized - the app shipped defaults file is not yet read at this point. This is similar to the database options, which must be set using one of the other Payara included config sources. (Non-easily resolvable timing issue). The solution for containers is to add an env var to the docker file, which can be overriden by any env var from compose or K8s etc. (Problem is the high ordinal of the env source though) --- src/main/docker/Dockerfile | 4 +++- src/main/resources/META-INF/microprofile-config.properties | 1 - 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/src/main/docker/Dockerfile b/src/main/docker/Dockerfile index 88020a118b5..f64e88cb414 100644 --- a/src/main/docker/Dockerfile +++ b/src/main/docker/Dockerfile @@ -27,7 +27,9 @@ FROM $BASE_IMAGE # Make Payara use the "ct" profile for MicroProfile Config. 
Will switch various defaults for the application # setup in META-INF/microprofile-config.properties. # See also https://download.eclipse.org/microprofile/microprofile-config-3.0/microprofile-config-spec-3.0.html#configprofile -ENV MP_CONFIG_PROFILE=ct +ENV MP_CONFIG_PROFILE=ct \ + # NOTE: this cannot be provided as default from microprofile-config.properties as not yet avail when servlet starts + DATAVERSE_FILES_UPLOADS="${STORAGE_DIR}/uploads" # Copy app and deps from assembly in proper layers COPY --chown=payara:payara maven/deps ${DEPLOY_DIR}/dataverse/WEB-INF/lib/ diff --git a/src/main/resources/META-INF/microprofile-config.properties b/src/main/resources/META-INF/microprofile-config.properties index 7c16495f870..748ed6de55a 100644 --- a/src/main/resources/META-INF/microprofile-config.properties +++ b/src/main/resources/META-INF/microprofile-config.properties @@ -12,7 +12,6 @@ dataverse.build= dataverse.files.directory=/tmp/dataverse # The variables are replaced with the environment variables from our base image, but still easy to override %ct.dataverse.files.directory=${STORAGE_DIR} -%ct.dataverse.files.uploads=${STORAGE_DIR}/uploads # SEARCH INDEX dataverse.solr.host=localhost From d71cdf2d427011fc660794bb12afbab9db1c2bc7 Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Wed, 21 Jun 2023 16:07:03 +0200 Subject: [PATCH 111/396] fix(ct,conf): switch to different approach to default upload location Instead of trying to provide a default using STORAGE_DIR env var from microprofile-config.properties as before, using this env var reference in glassfish-web.xml directly now. By defaulting to "." if not present (as in classic installations), it is fully equivalent to the former hardcoded default value. Providing a synced variant of it in microprofile-config.properties and leaving a hint about the pitfalls, we can reuse the setting for other purposes within the codebase as well (and expect the same behaviour because same defaults). 
--- src/main/docker/Dockerfile | 4 +--- src/main/resources/META-INF/microprofile-config.properties | 6 ++++++ src/main/webapp/WEB-INF/glassfish-web.xml | 2 +- 3 files changed, 8 insertions(+), 4 deletions(-) diff --git a/src/main/docker/Dockerfile b/src/main/docker/Dockerfile index f64e88cb414..88020a118b5 100644 --- a/src/main/docker/Dockerfile +++ b/src/main/docker/Dockerfile @@ -27,9 +27,7 @@ FROM $BASE_IMAGE # Make Payara use the "ct" profile for MicroProfile Config. Will switch various defaults for the application # setup in META-INF/microprofile-config.properties. # See also https://download.eclipse.org/microprofile/microprofile-config-3.0/microprofile-config-spec-3.0.html#configprofile -ENV MP_CONFIG_PROFILE=ct \ - # NOTE: this cannot be provided as default from microprofile-config.properties as not yet avail when servlet starts - DATAVERSE_FILES_UPLOADS="${STORAGE_DIR}/uploads" +ENV MP_CONFIG_PROFILE=ct # Copy app and deps from assembly in proper layers COPY --chown=payara:payara maven/deps ${DEPLOY_DIR}/dataverse/WEB-INF/lib/ diff --git a/src/main/resources/META-INF/microprofile-config.properties b/src/main/resources/META-INF/microprofile-config.properties index 748ed6de55a..f3745126cb2 100644 --- a/src/main/resources/META-INF/microprofile-config.properties +++ b/src/main/resources/META-INF/microprofile-config.properties @@ -12,6 +12,12 @@ dataverse.build= dataverse.files.directory=/tmp/dataverse # The variables are replaced with the environment variables from our base image, but still easy to override %ct.dataverse.files.directory=${STORAGE_DIR} +# NOTE: the following uses STORAGE_DIR for both containers and classic installations. By defaulting to "." if not +# present, it equals the hardcoded default from before again. Also, be aware that this props file cannot provide +# any value for lookups in glassfish-web.xml during servlet initialization, as this file will not have +# been read yet! 
The names and their values are in sync here and over there to ensure the config checker +# is able to check for the directories (exist + writeable). +dataverse.files.uploads=${STORAGE_DIR:.}/uploads # SEARCH INDEX dataverse.solr.host=localhost diff --git a/src/main/webapp/WEB-INF/glassfish-web.xml b/src/main/webapp/WEB-INF/glassfish-web.xml index e56d7013abf..8041ebd4447 100644 --- a/src/main/webapp/WEB-INF/glassfish-web.xml +++ b/src/main/webapp/WEB-INF/glassfish-web.xml @@ -18,5 +18,5 @@ This folder is not only holding compiled JSP pages but also the place where file streams are stored during uploads. As Dataverse does not use any JSP, there will only be uploads stored here. --> - + From a4ec3a66e76aa1559aea0c05cedc2da2b38d7b03 Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Wed, 21 Jun 2023 16:44:08 +0200 Subject: [PATCH 112/396] feat(conf): introduce ConfigCheckService to validate config on startup #9572 Starting with important local storage locations for the Dataverse application, this service uses EJB startup mechanisms to verify configuration bits on startup. Checks for the temp storage location and JSF upload location as crucial parts of the app, which, if not exist or write protected, while only cause errors and failures on the first data upload attempt. This is not desirable as it might cause users to be blocked. 
--- .../settings/ConfigCheckService.java | 65 +++++++++++++++++++ .../iq/dataverse/settings/JvmSettings.java | 1 + .../harvard/iq/dataverse/util/FileUtil.java | 29 ++++----- 3 files changed, 77 insertions(+), 18 deletions(-) create mode 100644 src/main/java/edu/harvard/iq/dataverse/settings/ConfigCheckService.java diff --git a/src/main/java/edu/harvard/iq/dataverse/settings/ConfigCheckService.java b/src/main/java/edu/harvard/iq/dataverse/settings/ConfigCheckService.java new file mode 100644 index 00000000000..4ba028903b0 --- /dev/null +++ b/src/main/java/edu/harvard/iq/dataverse/settings/ConfigCheckService.java @@ -0,0 +1,65 @@ +package edu.harvard.iq.dataverse.settings; + +import edu.harvard.iq.dataverse.util.FileUtil; + +import javax.annotation.PostConstruct; +import javax.ejb.DependsOn; +import javax.ejb.Singleton; +import javax.ejb.Startup; +import java.io.IOException; +import java.nio.file.Files; +import java.nio.file.Path; +import java.util.Map; +import java.util.logging.Level; +import java.util.logging.Logger; + +@Startup +@Singleton +@DependsOn("StartupFlywayMigrator") +public class ConfigCheckService { + + private static final Logger logger = Logger.getLogger(ConfigCheckService.class.getCanonicalName()); + + public static class ConfigurationError extends RuntimeException { + public ConfigurationError(String message) { + super(message); + } + } + + @PostConstruct + public void startup() { + if (!checkSystemDirectories()) { + throw new ConfigurationError("Not all configuration checks passed successfully. See logs above."); + } + } + + /** + * In this method, we check the existence and write-ability of all important directories we use during + * normal operations. It does not include checks for the storage system. If directories are not available, + * try to create them (and fail when not allowed to). + * + * @return True if all checks successful, false otherwise. 
+ */ + public boolean checkSystemDirectories() { + Map paths = Map.of( + Path.of(JvmSettings.UPLOADS_DIRECTORY.lookup()), "temporary JSF upload space (see " + JvmSettings.UPLOADS_DIRECTORY.getScopedKey() + ")", + Path.of(FileUtil.getFilesTempDirectory()), "temporary processing space (see " + JvmSettings.FILES_DIRECTORY.getScopedKey() + ")"); + + boolean success = true; + for (Path path : paths.keySet()) { + if (Files.notExists(path)) { + try { + Files.createDirectories(path); + } catch (IOException e) { + logger.log(Level.SEVERE, () -> "Could not create directory " + path + " for " + paths.get(path)); + success = false; + } + } else if (!Files.isWritable(path)) { + logger.log(Level.SEVERE, () -> "Directory " + path + " for " + paths.get(path) + " exists, but is not writeable"); + success = false; + } + } + return success; + } + +} diff --git a/src/main/java/edu/harvard/iq/dataverse/settings/JvmSettings.java b/src/main/java/edu/harvard/iq/dataverse/settings/JvmSettings.java index ff04a633ea7..c5c5682821a 100644 --- a/src/main/java/edu/harvard/iq/dataverse/settings/JvmSettings.java +++ b/src/main/java/edu/harvard/iq/dataverse/settings/JvmSettings.java @@ -49,6 +49,7 @@ public enum JvmSettings { // FILES SETTINGS SCOPE_FILES(PREFIX, "files"), FILES_DIRECTORY(SCOPE_FILES, "directory"), + UPLOADS_DIRECTORY(SCOPE_FILES, "uploads"), // SOLR INDEX SETTINGS SCOPE_SOLR(PREFIX, "solr"), diff --git a/src/main/java/edu/harvard/iq/dataverse/util/FileUtil.java b/src/main/java/edu/harvard/iq/dataverse/util/FileUtil.java index 6bb7e1d583b..ee1ee5a6a1c 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/FileUtil.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/FileUtil.java @@ -40,6 +40,7 @@ import edu.harvard.iq.dataverse.ingest.IngestServiceShapefileHelper; import edu.harvard.iq.dataverse.ingest.IngestableDataChecker; import edu.harvard.iq.dataverse.license.License; +import edu.harvard.iq.dataverse.settings.ConfigCheckService; import 
edu.harvard.iq.dataverse.settings.JvmSettings; import edu.harvard.iq.dataverse.util.file.BagItFileHandler; import edu.harvard.iq.dataverse.util.file.CreateDataFileResult; @@ -1478,25 +1479,17 @@ public static boolean canIngestAsTabular(String mimeType) { } } + /** + * Return the location where data should be stored temporarily after uploading (UI or API) + * for local processing (ingest, unzip, ...) and transfer to final destination (see storage subsystem). + * + * This location is checked to be configured, does exist, and is writeable via + * {@link ConfigCheckService#checkSystemDirectories()}. + * + * @return String with a path to the temporary location. Will not be null (former versions did to indicate failure) + */ public static String getFilesTempDirectory() { - - String filesRootDirectory = JvmSettings.FILES_DIRECTORY.lookup(); - String filesTempDirectory = filesRootDirectory + "/temp"; - - if (!Files.exists(Paths.get(filesTempDirectory))) { - /* Note that "createDirectories()" must be used - not - * "createDirectory()", to make sure all the parent - * directories that may not yet exist are created as well. - */ - try { - Files.createDirectories(Paths.get(filesTempDirectory)); - } catch (IOException ex) { - logger.severe("Failed to create filesTempDirectory: " + filesTempDirectory ); - return null; - } - } - - return filesTempDirectory; + return JvmSettings.FILES_DIRECTORY.lookup() + File.separator + "temp"; } public static void generateS3PackageStorageIdentifier(DataFile dataFile) { From 6999093dcea8e889a24aafbe84dd6035e8a4b5db Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Wed, 21 Jun 2023 17:37:40 +0200 Subject: [PATCH 113/396] feat(conf): make docroot location configurable #9662 Add JVM Setting and add to config checker on startup to ensure target location is in good shape. 
--- .../harvard/iq/dataverse/settings/ConfigCheckService.java | 3 ++- .../edu/harvard/iq/dataverse/settings/JvmSettings.java | 1 + .../resources/META-INF/microprofile-config.properties | 1 + src/main/webapp/WEB-INF/glassfish-web.xml | 8 ++++---- 4 files changed, 8 insertions(+), 5 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/settings/ConfigCheckService.java b/src/main/java/edu/harvard/iq/dataverse/settings/ConfigCheckService.java index 4ba028903b0..443d12fc17a 100644 --- a/src/main/java/edu/harvard/iq/dataverse/settings/ConfigCheckService.java +++ b/src/main/java/edu/harvard/iq/dataverse/settings/ConfigCheckService.java @@ -43,7 +43,8 @@ public void startup() { public boolean checkSystemDirectories() { Map paths = Map.of( Path.of(JvmSettings.UPLOADS_DIRECTORY.lookup()), "temporary JSF upload space (see " + JvmSettings.UPLOADS_DIRECTORY.getScopedKey() + ")", - Path.of(FileUtil.getFilesTempDirectory()), "temporary processing space (see " + JvmSettings.FILES_DIRECTORY.getScopedKey() + ")"); + Path.of(FileUtil.getFilesTempDirectory()), "temporary processing space (see " + JvmSettings.FILES_DIRECTORY.getScopedKey() + ")", + Path.of(JvmSettings.DOCROOT_DIRECTORY.lookup()), "docroot space (see " + JvmSettings.DOCROOT_DIRECTORY.getScopedKey() + ")"); boolean success = true; for (Path path : paths.keySet()) { diff --git a/src/main/java/edu/harvard/iq/dataverse/settings/JvmSettings.java b/src/main/java/edu/harvard/iq/dataverse/settings/JvmSettings.java index c5c5682821a..540dc8201a0 100644 --- a/src/main/java/edu/harvard/iq/dataverse/settings/JvmSettings.java +++ b/src/main/java/edu/harvard/iq/dataverse/settings/JvmSettings.java @@ -50,6 +50,7 @@ public enum JvmSettings { SCOPE_FILES(PREFIX, "files"), FILES_DIRECTORY(SCOPE_FILES, "directory"), UPLOADS_DIRECTORY(SCOPE_FILES, "uploads"), + DOCROOT_DIRECTORY(SCOPE_FILES, "docroot"), // SOLR INDEX SETTINGS SCOPE_SOLR(PREFIX, "solr"), diff --git a/src/main/resources/META-INF/microprofile-config.properties 
b/src/main/resources/META-INF/microprofile-config.properties index f3745126cb2..597d50b2e0c 100644 --- a/src/main/resources/META-INF/microprofile-config.properties +++ b/src/main/resources/META-INF/microprofile-config.properties @@ -18,6 +18,7 @@ dataverse.files.directory=/tmp/dataverse # been read yet! The names and their values are in sync here and over there to ensure the config checker # is able to check for the directories (exist + writeable). dataverse.files.uploads=${STORAGE_DIR:.}/uploads +dataverse.files.docroot=${STORAGE_DIR:.}/docroot # SEARCH INDEX dataverse.solr.host=localhost diff --git a/src/main/webapp/WEB-INF/glassfish-web.xml b/src/main/webapp/WEB-INF/glassfish-web.xml index 8041ebd4447..5088e5a7fba 100644 --- a/src/main/webapp/WEB-INF/glassfish-web.xml +++ b/src/main/webapp/WEB-INF/glassfish-web.xml @@ -10,10 +10,10 @@ - - - - + + + + + From 2913a52f35645621bace35c93a9c0b2707004da1 Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Wed, 21 Jun 2023 18:32:55 +0200 Subject: [PATCH 117/396] refactor(conf): simplify sitemap output location lookup using new docroot setting --- .../iq/dataverse/sitemap/SiteMapUtil.java | 21 +++++++++++-------- 1 file changed, 12 insertions(+), 9 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/sitemap/SiteMapUtil.java b/src/main/java/edu/harvard/iq/dataverse/sitemap/SiteMapUtil.java index e32b811ee2c..86ae697f771 100644 --- a/src/main/java/edu/harvard/iq/dataverse/sitemap/SiteMapUtil.java +++ b/src/main/java/edu/harvard/iq/dataverse/sitemap/SiteMapUtil.java @@ -3,6 +3,8 @@ import edu.harvard.iq.dataverse.Dataset; import edu.harvard.iq.dataverse.Dataverse; import edu.harvard.iq.dataverse.DvObjectContainer; +import edu.harvard.iq.dataverse.settings.ConfigCheckService; +import edu.harvard.iq.dataverse.settings.JvmSettings; import edu.harvard.iq.dataverse.util.SystemConfig; import edu.harvard.iq.dataverse.util.xml.XmlValidator; import java.io.File; @@ -210,16 +212,17 @@ public static boolean 
stageFileExists() { } return false; } - + + /** + * Lookup the location where to generate the sitemap. + * + * Note: the location is checked to be configured, does exist and is writeable in + * {@link ConfigCheckService#checkSystemDirectories()} + * + * @return Sitemap storage location ([docroot]/sitemap) + */ private static String getSitemapPathString() { - String sitemapPathString = "/tmp"; - // i.e. /usr/local/glassfish4/glassfish/domains/domain1 - String domainRoot = System.getProperty("com.sun.aas.instanceRoot"); - if (domainRoot != null) { - // Note that we write to a directory called "sitemap" but we serve just "/sitemap.xml" using PrettyFaces. - sitemapPathString = domainRoot + File.separator + "docroot" + File.separator + "sitemap"; - } - return sitemapPathString; + return JvmSettings.DOCROOT_DIRECTORY.lookup() + File.separator + "sitemap"; } } From fd4d9199fade70e6a2387ad02051bbf4865fa9f2 Mon Sep 17 00:00:00 2001 From: qqmyers Date: Thu, 22 Jun 2023 10:33:37 -0400 Subject: [PATCH 118/396] add gb fragment --- .../guestbook-terms-popup-fragment.xhtml | 105 ++++++++++++++++++ 1 file changed, 105 insertions(+) diff --git a/src/main/webapp/guestbook-terms-popup-fragment.xhtml b/src/main/webapp/guestbook-terms-popup-fragment.xhtml index 9c3391ef9ae..829fa6539b9 100644 --- a/src/main/webapp/guestbook-terms-popup-fragment.xhtml +++ b/src/main/webapp/guestbook-terms-popup-fragment.xhtml @@ -40,6 +40,111 @@ + + + +
    + +
    + + + +
    +
    +
    + +
    + + + +
    +
    +
    + +
    + + + +
    +
    +
    + +
    + + + +
    +
    +
    + +
    + +
    + + + + + + + + + + + +
    +
    +
    +
    +
    Date: Thu, 22 Jun 2023 10:34:11 -0400 Subject: [PATCH 119/396] change render param - not clear this fragment stays here though --- src/main/webapp/file-download-popup-fragment.xhtml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/main/webapp/file-download-popup-fragment.xhtml b/src/main/webapp/file-download-popup-fragment.xhtml index e1020c85e69..6fe3863b85f 100644 --- a/src/main/webapp/file-download-popup-fragment.xhtml +++ b/src/main/webapp/file-download-popup-fragment.xhtml @@ -138,7 +138,7 @@ value="#{MarkupChecker:sanitizeBasicHTML(workingVersion.termsOfUseAndAccess.termsOfAccess)}" escape="false" />
    - + #{fileMetadata.dataFile.friendlyType == 'Unknown' ? bundle['file.download.filetype.unknown'] : fileMetadata.dataFile.friendlyType} @@ -167,7 +168,7 @@ action="#{guestbookResponseService.modifyDatafileAndFormat(guestbookResponse, fileMetadata, 'original' )}" update="@widgetVar(guestbookAndTermsPopup)" oncomplete="PF('guestbookAndTermsPopup').show();handleResizeDialog('guestbookAndTermsPopup');"> - + @@ -186,7 +187,7 @@ disabled="#{(fileMetadata.dataFile.ingestInProgress or lockedFromDownload) ? 'disabled' : ''}" update="@widgetVar(guestbookAndTermsPopup)" oncomplete="PF('guestbookAndTermsPopup').show();handleResizeDialog('guestbookAndTermsPopup');"> - + #{bundle['file.downloadBtn.format.tab']} @@ -205,7 +206,7 @@ action="#{guestbookResponseService.modifyDatafileAndFormat(guestbookResponse, fileMetadata, 'RData' )}" update="@widgetVar(guestbookAndTermsPopup)" oncomplete="PF('guestbookAndTermsPopup').show();handleResizeDialog('guestbookAndTermsPopup');"> - + #{bundle['file.downloadBtn.format.rdata']} From 58bc6c9a264ca7ef5caf44ad3fbc3a04a38f67fb Mon Sep 17 00:00:00 2001 From: qqmyers Date: Thu, 22 Jun 2023 15:40:48 -0400 Subject: [PATCH 121/396] missing actionListeners --- src/main/webapp/filesFragment.xhtml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/src/main/webapp/filesFragment.xhtml b/src/main/webapp/filesFragment.xhtml index 4c1a943b86e..7e1cb4ac4cd 100644 --- a/src/main/webapp/filesFragment.xhtml +++ b/src/main/webapp/filesFragment.xhtml @@ -444,6 +444,7 @@ onclick="if (!testFilesSelected()) return false;" action="#{DatasetPage.startDownloadSelectedOriginal()}" update="@form" oncomplete="showPopup();"> + #{bundle.download} @@ -462,6 +463,7 @@ update="@form" oncomplete="showPopup();" onclick="if (!testFilesSelected()) return false;" actionListener="#{DatasetPage.startDownloadSelectedOriginal()}"> + #{bundle.downloadOriginal} From 60c4db0d593b455a02d6df2ebe7801b8c9dbca5e Mon Sep 17 00:00:00 2001 From: qqmyers Date: Thu, 22 Jun 2023 15:42:24 -0400 
Subject: [PATCH 122/396] add missing params, change fileDownloadHelper to EJB --- src/main/java/edu/harvard/iq/dataverse/DatasetPage.java | 2 +- src/main/webapp/dataset.xhtml | 2 ++ 2 files changed, 3 insertions(+), 1 deletion(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java b/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java index f01fe00937e..40b0ba1a010 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java @@ -244,7 +244,7 @@ public enum DisplayMode { DatasetVersionUI datasetVersionUI; @Inject PermissionsWrapper permissionsWrapper; - @Inject + @EJB FileDownloadHelper fileDownloadHelper; @Inject ThumbnailServiceWrapper thumbnailServiceWrapper; diff --git a/src/main/webapp/dataset.xhtml b/src/main/webapp/dataset.xhtml index c39042a91ce..1cc8213ecd5 100644 --- a/src/main/webapp/dataset.xhtml +++ b/src/main/webapp/dataset.xhtml @@ -1511,9 +1511,11 @@ + + From 1e8495cff14955a12f7829a55fb7b2a434c537e6 Mon Sep 17 00:00:00 2001 From: qqmyers Date: Thu, 22 Jun 2023 16:07:04 -0400 Subject: [PATCH 123/396] use Inject and restore getter/setter --- .../java/edu/harvard/iq/dataverse/DatasetPage.java | 10 +++++++++- 1 file changed, 9 insertions(+), 1 deletion(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java b/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java index 40b0ba1a010..e890752c19c 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java @@ -244,7 +244,7 @@ public enum DisplayMode { DatasetVersionUI datasetVersionUI; @Inject PermissionsWrapper permissionsWrapper; - @EJB + @Inject FileDownloadHelper fileDownloadHelper; @Inject ThumbnailServiceWrapper thumbnailServiceWrapper; @@ -5476,6 +5476,14 @@ public FileDownloadServiceBean getFileDownloadService() { public void setFileDownloadService(FileDownloadServiceBean fileDownloadService) { this.fileDownloadService = 
fileDownloadService; } + + public FileDownloadHelper getFileDownloadHelper() { + return fileDownloadHelper; + } + + public void setFileDownloadHelper(FileDownloadHelper fileDownloadHelper) { + this.fileDownloadHelper = fileDownloadHelper; + } public GuestbookResponseServiceBean getGuestbookResponseService() { From 1400e07486223465641e289756c7d38577dbf00b Mon Sep 17 00:00:00 2001 From: qqmyers Date: Fri, 23 Jun 2023 12:28:15 -0400 Subject: [PATCH 124/396] fix gb render conditions, initial fix for download buttons --- .../webapp/guestbook-terms-popup-fragment.xhtml | 14 +++++++++----- 1 file changed, 9 insertions(+), 5 deletions(-) diff --git a/src/main/webapp/guestbook-terms-popup-fragment.xhtml b/src/main/webapp/guestbook-terms-popup-fragment.xhtml index 829fa6539b9..ab75ffbe3e7 100644 --- a/src/main/webapp/guestbook-terms-popup-fragment.xhtml +++ b/src/main/webapp/guestbook-terms-popup-fragment.xhtml @@ -40,7 +40,7 @@ - + - + From 51005bad3afd714aa8edbbbb03eb0faf604470c2 Mon Sep 17 00:00:00 2001 From: qqmyers Date: Fri, 23 Jun 2023 15:31:22 -0400 Subject: [PATCH 128/396] update fragment with license info and buttons for other views --- .../guestbook-terms-popup-fragment.xhtml | 195 +++++++++++++++--- 1 file changed, 168 insertions(+), 27 deletions(-) diff --git a/src/main/webapp/guestbook-terms-popup-fragment.xhtml b/src/main/webapp/guestbook-terms-popup-fragment.xhtml index ab75ffbe3e7..4bddbb0dd38 100644 --- a/src/main/webapp/guestbook-terms-popup-fragment.xhtml +++ b/src/main/webapp/guestbook-terms-popup-fragment.xhtml @@ -9,37 +9,138 @@ xmlns:iqbs="http://xmlns.jcp.org/jsf/composite/iqbs"> -

    - #{someActivelyEmbargoedFiles ? bundle['file.requestAccessTermsDialog.embargoed.tip'] : bundle['file.requestAccessTermsDialog.tip']} -

    -

    - #{bundle['file.requestAccessTermsDialog.embargoed']} -

    -
    -
    -
    - +
    diff --git a/src/main/webapp/file-info-fragment.xhtml b/src/main/webapp/file-info-fragment.xhtml index 33a8d2c3ca5..ca82738f920 100644 --- a/src/main/webapp/file-info-fragment.xhtml +++ b/src/main/webapp/file-info-fragment.xhtml @@ -67,7 +67,7 @@
    - +
    diff --git a/src/main/webapp/file.xhtml b/src/main/webapp/file.xhtml index 3772b3c4dbe..744b593dbd8 100644 --- a/src/main/webapp/file.xhtml +++ b/src/main/webapp/file.xhtml @@ -298,7 +298,7 @@
    - + @@ -306,7 +306,7 @@
    - + From e506279ad28018ec71e4990e0d99e36c66fe6a26 Mon Sep 17 00:00:00 2001 From: qqmyers Date: Thu, 21 Sep 2023 14:51:37 -0400 Subject: [PATCH 282/396] fix recursive calls --- .../java/edu/harvard/iq/dataverse/GuestbookResponse.java | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/GuestbookResponse.java b/src/main/java/edu/harvard/iq/dataverse/GuestbookResponse.java index b27571cdbf5..3fb0d6691bf 100644 --- a/src/main/java/edu/harvard/iq/dataverse/GuestbookResponse.java +++ b/src/main/java/edu/harvard/iq/dataverse/GuestbookResponse.java @@ -147,19 +147,19 @@ public void setWriteResponse(boolean writeResponse) { } public String getSelectedFileIds(){ - return this.getSelectedFileIds(); + return this.selectedFileIds; } public void setSelectedFileIds(String selectedFileIds) { - this.setSelectedFileIds(selectedFileIds); + this.selectedFileIds = selectedFileIds; } public String getFileFormat() { - return this.getFileFormat(); + return this.fileFormat; } public void setFileFormat(String downloadFormat) { - this.setFileFormat(downloadFormat); + this.fileFormat = downloadFormat; } public ExternalTool getExternalTool() { From bfce43eca504192e553d85bce103f88aa481af88 Mon Sep 17 00:00:00 2001 From: qqmyers Date: Thu, 21 Sep 2023 15:55:12 -0400 Subject: [PATCH 283/396] update stored function, make update idempotent --- .../V6.0.0.1__9599-guestbook-at-request.sql | 29 ++++++++++++------- 1 file changed, 19 insertions(+), 10 deletions(-) diff --git a/src/main/resources/db/migration/V6.0.0.1__9599-guestbook-at-request.sql b/src/main/resources/db/migration/V6.0.0.1__9599-guestbook-at-request.sql index df98047b513..fd892fd3356 100644 --- a/src/main/resources/db/migration/V6.0.0.1__9599-guestbook-at-request.sql +++ b/src/main/resources/db/migration/V6.0.0.1__9599-guestbook-at-request.sql @@ -1,12 +1,19 @@ ALTER TABLE guestbookresponse ADD COLUMN IF NOT EXISTS eventtype VARCHAR(255); ALTER TABLE guestbookresponse ADD 
COLUMN IF NOT EXISTS sessionid VARCHAR(255); -UPDATE guestbookresponse g - SET eventtype = (SELECT downloadtype FROM filedownload f where f.guestbookresponse_id = g.id), - sessionid = (SELECT sessionid FROM filedownload f where f.guestbookresponse_id=g.id); - -DROP TABLE filedownload; +DO $$ + BEGIN + IF EXISTS (select 1 from pg_class where relname='filedownload') THEN + UPDATE guestbookresponse g + SET eventtype = (SELECT downloadtype FROM filedownload f where f.guestbookresponse_id = g.id), + sessionid = (SELECT sessionid FROM filedownload f where f.guestbookresponse_id=g.id); + DROP TABLE filedownload; + END IF; + END + $$ ; + + -- This creates a function that ESTIMATES the size of the -- GuestbookResponse table (for the metrics display), instead -- of relying on straight "SELECT COUNT(*) ..." @@ -17,18 +24,20 @@ RETURNS bigint AS $$ DECLARE estimatedsize bigint; BEGIN - SELECT CASE WHEN relpages=0 THEN 0 + SELECT CASE WHEN relpages<10 THEN 0 ELSE ((reltuples / relpages) * (pg_relation_size('public.guestbookresponse') / current_setting('block_size')::int))::bigint - * (SELECT CASE WHEN ((select count(*) from pg_stats where tablename='guestbookresponse') = 0) THEN 1 - ELSE 1 - (SELECT (most_common_freqs::text::bigint[])[array_position(most_common_vals::text::text[], 'AccessRequest')] - FROM pg_stats WHERE tablename='guestbookresponse' and attname='eventtype') END) + * (SELECT CASE WHEN ((select count(*) from pg_stats where tablename='guestbookresponse') = 0 + OR (select array_position(most_common_vals::text::text[], 'AccessRequest') + FROM pg_stats WHERE tablename='guestbookresponse' AND attname='eventtype') IS NULL) THEN 1 + ELSE 1 - (SELECT (most_common_freqs::text::text[])[array_position(most_common_vals::text::text[], 'AccessRequest')]::bigint + FROM pg_stats WHERE tablename='guestbookresponse' and attname='eventtype') END) END FROM pg_class WHERE oid = 'public.guestbookresponse'::regclass INTO estimatedsize; if estimatedsize = 0 then - SELECT COUNT(id) FROM 
guestbookresponse INTO estimatedsize; + SELECT COUNT(id) FROM guestbookresponse WHERE eventtype!= 'AccessRequest' INTO estimatedsize; END if; RETURN estimatedsize; From efa3d518a8184bef56b59ead26afba3af53ccb03 Mon Sep 17 00:00:00 2001 From: qqmyers Date: Fri, 22 Sep 2023 09:56:28 -0400 Subject: [PATCH 284/396] remove todo --- .../iq/dataverse/GuestbookResponse.java | 29 +++++++------------ 1 file changed, 11 insertions(+), 18 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/GuestbookResponse.java b/src/main/java/edu/harvard/iq/dataverse/GuestbookResponse.java index 3fb0d6691bf..203be69cf14 100644 --- a/src/main/java/edu/harvard/iq/dataverse/GuestbookResponse.java +++ b/src/main/java/edu/harvard/iq/dataverse/GuestbookResponse.java @@ -90,18 +90,19 @@ public class GuestbookResponse implements Serializable { @Temporal(value = TemporalType.TIMESTAMP) private Date responseTime; - - /** - * Possible values for downloadType include "Download", "Subset", - * or the displayName of an ExternalTool. - * - * TODO: Types like "Download" and "Subset" should - * be defined once as constants (likely an enum) rather than having these - * strings duplicated in various places when setDownloadtype() is called. - */ - private String eventType; + private String sessionId; + private String eventType; + + /** Event Types - there are four pre-defined values in use. + * The type can also be the name of a previewer/explore tool + */ + static final String ACCESS_REQUEST = "AccessRequest"; + static final String DOWNLOAD = "Download"; + static final String SUBSET = "Subset"; + static final String EXPLORE = "Explore"; + /* Transient Values carry non-written information that will assist in the download process @@ -128,14 +129,6 @@ public class GuestbookResponse implements Serializable { @Transient private ExternalTool externalTool; - /* Event Types - there are four pre-defined values in use. 
- * The type can also be the name of a previewer/explore tool - */ - - static final String ACCESS_REQUEST = "AccessRequest"; - static final String DOWNLOAD = "Download"; - static final String SUBSET = "Subset"; - static final String EXPLORE = "Explore"; public boolean isWriteResponse() { From 0350f99697d7099f7a4e89cc41064a3bae25d60b Mon Sep 17 00:00:00 2001 From: qqmyers Date: Fri, 22 Sep 2023 10:14:30 -0400 Subject: [PATCH 285/396] add notes, reference issue/PR about the estimation method --- .../db/migration/V6.0.0.1__9599-guestbook-at-request.sql | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/src/main/resources/db/migration/V6.0.0.1__9599-guestbook-at-request.sql b/src/main/resources/db/migration/V6.0.0.1__9599-guestbook-at-request.sql index fd892fd3356..109963d8382 100644 --- a/src/main/resources/db/migration/V6.0.0.1__9599-guestbook-at-request.sql +++ b/src/main/resources/db/migration/V6.0.0.1__9599-guestbook-at-request.sql @@ -17,7 +17,12 @@ DO $$ -- This creates a function that ESTIMATES the size of the -- GuestbookResponse table (for the metrics display), instead -- of relying on straight "SELECT COUNT(*) ..." +-- It uses statistics to estimate the number of guestbook entries +-- and the fraction of them related to downloads, +-- i.e. those that weren't created for 'AccessRequest' events. -- Significant potential savings for an active installation. +-- See https://github.com/IQSS/dataverse/issues/8840 and +-- https://github.com/IQSS/dataverse/pull/8972 for more details CREATE OR REPLACE FUNCTION estimateGuestBookResponseTableSize() RETURNS bigint AS $$ From b3808c5f5d4b058a5be359c27a35254936a37266 Mon Sep 17 00:00:00 2001 From: Juan Pablo Tosca Villanueva Date: Fri, 22 Sep 2023 12:20:31 -0400 Subject: [PATCH 286/396] We talked about this on the Containerization Working Group meeting on 2023-09-21. Deleting the duplicated chmod and moving the bootstrap script copy to be executed before the chmod. 
--- modules/container-configbaker/Dockerfile | 12 +++++++----- 1 file changed, 7 insertions(+), 5 deletions(-) diff --git a/modules/container-configbaker/Dockerfile b/modules/container-configbaker/Dockerfile index 44f3806a591..2975b043213 100644 --- a/modules/container-configbaker/Dockerfile +++ b/modules/container-configbaker/Dockerfile @@ -26,8 +26,12 @@ RUN true && \ # Make our working directories mkdir -p ${SCRIPT_DIR} ${SECRETS_DIR} ${SOLR_TEMPLATE} -# Get in the scripts and make them executable (just in case...) +# Get in the scripts COPY maven/scripts maven/solr/update-fields.sh ${SCRIPT_DIR}/ +# Copy the data from scripts/api that provide the common base setup you'd get from the installer. +# ".dockerignore" will take care of taking only the bare necessities +COPY maven/setup ${SCRIPT_DIR}/bootstrap/base/ +# Make the scripts executable RUN chmod +x ${SCRIPT_DIR}/*.sh ${BOOTSTRAP_DIR}/*/*.sh # Copy the Solr config bits @@ -35,10 +39,8 @@ COPY --from=solr /opt/solr/server/solr/configsets/_default ${SOLR_TEMPLATE}/ COPY maven/solr/*.xml ${SOLR_TEMPLATE}/conf/ RUN rm ${SOLR_TEMPLATE}/conf/managed-schema.xml -# Copy the data from scripts/api that provide the common base setup you'd get from the installer. 
-# ".dockerignore" will take care of taking only the bare necessities -COPY maven/setup ${SCRIPT_DIR}/bootstrap/base/ -RUN chmod +x ${BOOTSTRAP_DIR}/*/*.sh + + # Set the entrypoint to tini (as a process supervisor) ENTRYPOINT ["/usr/bin/dumb-init", "--"] From 53a901c8ce60edd4cfd87a179092286dd9520a86 Mon Sep 17 00:00:00 2001 From: Jim Myers Date: Fri, 22 Sep 2023 14:24:29 -0400 Subject: [PATCH 287/396] update metrics api queries --- .../java/edu/harvard/iq/dataverse/GuestbookResponse.java | 3 +-- .../harvard/iq/dataverse/metrics/MetricsServiceBean.java | 8 ++++++++ 2 files changed, 9 insertions(+), 2 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/GuestbookResponse.java b/src/main/java/edu/harvard/iq/dataverse/GuestbookResponse.java index 203be69cf14..976f1e084ac 100644 --- a/src/main/java/edu/harvard/iq/dataverse/GuestbookResponse.java +++ b/src/main/java/edu/harvard/iq/dataverse/GuestbookResponse.java @@ -98,7 +98,7 @@ public class GuestbookResponse implements Serializable { * The type can also be the name of a previewer/explore tool */ - static final String ACCESS_REQUEST = "AccessRequest"; + public static final String ACCESS_REQUEST = "AccessRequest"; static final String DOWNLOAD = "Download"; static final String SUBSET = "Subset"; static final String EXPLORE = "Explore"; @@ -129,7 +129,6 @@ public class GuestbookResponse implements Serializable { @Transient private ExternalTool externalTool; - public boolean isWriteResponse() { return writeResponse; diff --git a/src/main/java/edu/harvard/iq/dataverse/metrics/MetricsServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/metrics/MetricsServiceBean.java index 065b42e5afe..837cd518817 100644 --- a/src/main/java/edu/harvard/iq/dataverse/metrics/MetricsServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/metrics/MetricsServiceBean.java @@ -2,6 +2,7 @@ import edu.harvard.iq.dataverse.DatasetVersion; import edu.harvard.iq.dataverse.Dataverse; +import 
edu.harvard.iq.dataverse.GuestbookResponse; import edu.harvard.iq.dataverse.Metric; import edu.harvard.iq.dataverse.makedatacount.MakeDataCountUtil.MetricType; @@ -424,6 +425,7 @@ public JsonArray downloadsTimeSeries(Dataverse d) { + "select distinct COALESCE(to_char(responsetime, 'YYYY-MM'),'" + earliest + "') as date, count(id)\n" + "from guestbookresponse\n" + ((d == null) ? "" : "where dataset_id in (" + getCommaSeparatedIdStringForSubtree(d, "Dataset") + ")") + + "and eventtype!='" + GuestbookResponse.ACCESS_REQUEST +"'\n" + " group by COALESCE(to_char(responsetime, 'YYYY-MM'),'" + earliest + "') order by COALESCE(to_char(responsetime, 'YYYY-MM'),'" + earliest + "');"); logger.log(Level.FINE, "Metric query: {0}", query); @@ -456,6 +458,7 @@ public long downloadsToMonth(String yyyymm, Dataverse d) throws ParseException { + "from guestbookresponse\n" + "where (date_trunc('month', responsetime) <= to_date('" + yyyymm + "','YYYY-MM')" + "or responsetime is NULL)\n" // includes historic guestbook records without date + + "and eventtype!='" + GuestbookResponse.ACCESS_REQUEST +"'\n" + ((d==null) ? ";": "AND dataset_id in (" + getCommaSeparatedIdStringForSubtree(d, "Dataset") + ");") ); logger.log(Level.FINE, "Metric query: {0}", query); @@ -477,6 +480,7 @@ public long downloadsPastDays(int days, Dataverse d) { + "select count(id)\n" + "from guestbookresponse\n" + "where responsetime > current_date - interval '" + days + "' day\n" + + "and eventtype!='" + GuestbookResponse.ACCESS_REQUEST +"'\n" + ((d==null) ? ";": "AND dataset_id in (" + getCommaSeparatedIdStringForSubtree(d, "Dataset") + ");") ); logger.log(Level.FINE, "Metric query: {0}", query); @@ -489,6 +493,7 @@ public JsonArray fileDownloadsTimeSeries(Dataverse d, boolean uniqueCounts) { + " FROM guestbookresponse gb, DvObject ob" + " where ob.id = gb.datafile_id " + ((d == null) ? 
"" : " and ob.owner_id in (" + getCommaSeparatedIdStringForSubtree(d, "Dataset") + ")\n") + + "and eventtype!='" + GuestbookResponse.ACCESS_REQUEST +"'\n" + "group by gb.datafile_id, ob.id, ob.protocol, ob.authority, ob.identifier, to_char(gb.responsetime, 'YYYY-MM') order by to_char(gb.responsetime, 'YYYY-MM');"); logger.log(Level.FINE, "Metric query: {0}", query); @@ -503,6 +508,7 @@ public JsonArray fileDownloads(String yyyymm, Dataverse d, boolean uniqueCounts) + " where ob.id = gb.datafile_id " + ((d == null) ? "" : " and ob.owner_id in (" + getCommaSeparatedIdStringForSubtree(d, "Dataset") + ")\n") + " and date_trunc('month', gb.responsetime) <= to_date('" + yyyymm + "','YYYY-MM')\n" + + "and eventtype!='" + GuestbookResponse.ACCESS_REQUEST +"'\n" + "group by gb.datafile_id, ob.id, ob.protocol, ob.authority, ob.identifier order by count desc;"); logger.log(Level.FINE, "Metric query: {0}", query); @@ -529,6 +535,7 @@ public JsonArray uniqueDownloadsTimeSeries(Dataverse d) { + " FROM guestbookresponse gb, DvObject ob" + " where ob.id = gb.dataset_id " + ((d == null) ? "" : " and ob.owner_id in (" + getCommaSeparatedIdStringForSubtree(d, "Dataverse") + ")\n") + + "and eventtype!='" + GuestbookResponse.ACCESS_REQUEST +"'\n" + "group by gb.dataset_id, ob.protocol, ob.authority, ob.identifier, to_char(gb.responsetime, 'YYYY-MM') order by to_char(gb.responsetime, 'YYYY-MM');"); logger.log(Level.FINE, "Metric query: {0}", query); @@ -546,6 +553,7 @@ public JsonArray uniqueDatasetDownloads(String yyyymm, Dataverse d) { + " where ob.id = gb.dataset_id " + ((d == null) ? 
"" : " and ob.owner_id in (" + getCommaSeparatedIdStringForSubtree(d, "Dataverse") + ")\n") + " and date_trunc('month', responsetime) <= to_date('" + yyyymm + "','YYYY-MM')\n" + + "and eventtype!='" + GuestbookResponse.ACCESS_REQUEST +"'\n" + "group by gb.dataset_id, ob.protocol, ob.authority, ob.identifier order by count(distinct email) desc;"); JsonArrayBuilder jab = Json.createArrayBuilder(); try { From 740f63bd852abdd19c7f7aa43b2f641519ae0a66 Mon Sep 17 00:00:00 2001 From: Jim Myers Date: Fri, 22 Sep 2023 14:52:23 -0400 Subject: [PATCH 288/396] fix query --- .../edu/harvard/iq/dataverse/metrics/MetricsServiceBean.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/metrics/MetricsServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/metrics/MetricsServiceBean.java index 837cd518817..79369207963 100644 --- a/src/main/java/edu/harvard/iq/dataverse/metrics/MetricsServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/metrics/MetricsServiceBean.java @@ -425,7 +425,7 @@ public JsonArray downloadsTimeSeries(Dataverse d) { + "select distinct COALESCE(to_char(responsetime, 'YYYY-MM'),'" + earliest + "') as date, count(id)\n" + "from guestbookresponse\n" + ((d == null) ? "" : "where dataset_id in (" + getCommaSeparatedIdStringForSubtree(d, "Dataset") + ")") - + "and eventtype!='" + GuestbookResponse.ACCESS_REQUEST +"'\n" + + ((d == null) ? 
"where ":" and ") + "eventtype!='" + GuestbookResponse.ACCESS_REQUEST +"'\n" + " group by COALESCE(to_char(responsetime, 'YYYY-MM'),'" + earliest + "') order by COALESCE(to_char(responsetime, 'YYYY-MM'),'" + earliest + "');"); logger.log(Level.FINE, "Metric query: {0}", query); From 21c4a7fbaa48ace2ba588a72103af619cd70df02 Mon Sep 17 00:00:00 2001 From: qqmyers Date: Mon, 25 Sep 2023 10:36:12 -0400 Subject: [PATCH 289/396] downloadtype -> eventtype --- .../iq/dataverse/GuestbookResponseServiceBean.java | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/GuestbookResponseServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/GuestbookResponseServiceBean.java index 4800ffd439f..b0cc41eb448 100644 --- a/src/main/java/edu/harvard/iq/dataverse/GuestbookResponseServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/GuestbookResponseServiceBean.java @@ -63,7 +63,7 @@ public class GuestbookResponseServiceBean { + " and r.dataset_id = o.id " + " and r.guestbook_id = g.id ";*/ - private static final String BASE_QUERY_STRING_FOR_DOWNLOAD_AS_CSV = "select r.id, g.name, o.id, r.responsetime, r.downloadtype," + private static final String BASE_QUERY_STRING_FOR_DOWNLOAD_AS_CSV = "select r.id, g.name, o.id, r.responsetime, r.eventtype," + " m.label, r.dataFile_id, r.name, r.email, r.institution, r.position," + " o.protocol, o.authority, o.identifier, d.protocol, d.authority, d.identifier " + "from guestbookresponse r, filemetadata m, dvobject o, guestbook g, dvobject d " @@ -78,7 +78,7 @@ public class GuestbookResponseServiceBean { // on the guestbook-results.xhtml page (the info we show on the page is // less detailed than what we let the users download as CSV files, so this // query has fewer fields than the one above). -- L.A. 
- private static final String BASE_QUERY_STRING_FOR_PAGE_DISPLAY = "select r.id, v.value, r.responsetime, r.downloadtype, m.label, r.name " + private static final String BASE_QUERY_STRING_FOR_PAGE_DISPLAY = "select r.id, v.value, r.responsetime, r.eventtype, m.label, r.name " + "from guestbookresponse r, datasetfieldvalue v, filemetadata m , dvobject o " + "where " + " v.datasetfield_id = (select id from datasetfield f where datasetfieldtype_id = 1 " @@ -735,8 +735,8 @@ public GuestbookResponse initGuestbookResponse(FileMetadata fileMetadata, String * "externalTool" for all external tools, including TwoRavens. When * clicking "Explore" and then the name of the tool, we want the * name of the exploration tool (i.e. "Data Explorer", - * etc.) to be persisted as the downloadType. We execute - * guestbookResponse.setDownloadtype(externalTool.getDisplayName()) + * etc.) to be persisted as the eventType. We execute + * guestbookResponse.setEventType(externalTool.getDisplayName()) * over in the "explore" method of FileDownloadServiceBean just * before the guestbookResponse is written. */ From e5575135b474eca7457a620646d95c4cc839aad9 Mon Sep 17 00:00:00 2001 From: Jim Myers Date: Mon, 25 Sep 2023 12:36:52 -0400 Subject: [PATCH 290/396] fix popup logic for previews, fix old bug guestbookAndTermsPopupRequired (and downloadPopupRequired before it) was not defined and I think therefore was always false, meaning the option to show a popup never occurred. This may have been OK in practice since one would have to have accepted the terms popup to show the preview and preview pane with these buttons. The fix here should show the terms popup prior to allowing the explore button to be clicked if/when these buttons ever show and one hasn't already accepted the terms (and termsMet is therefore true). 
--- src/main/webapp/file.xhtml | 11 +++++++---- 1 file changed, 7 insertions(+), 4 deletions(-) diff --git a/src/main/webapp/file.xhtml b/src/main/webapp/file.xhtml index 744b593dbd8..f69b5c35afd 100644 --- a/src/main/webapp/file.xhtml +++ b/src/main/webapp/file.xhtml @@ -364,12 +364,12 @@ - + - + -
    +
    + styleClass="largePopUp" widgetVar="deleteConfirmation" modal="true" focus="contDeleteTemplateBtn">

     

    From 28df26c7d9f88b4c594e21bfcf1fbd4b695dc1b1 Mon Sep 17 00:00:00 2001 From: Stephen Kraffmiller Date: Thu, 28 Sep 2023 16:34:20 -0400 Subject: [PATCH 320/396] #9913 make request access true on create template also fix various render issues. --- .../harvard/iq/dataverse/TemplatePage.java | 1 + src/main/webapp/dataset-license-terms.xhtml | 25 +++++++++++-------- 2 files changed, 15 insertions(+), 11 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/TemplatePage.java b/src/main/java/edu/harvard/iq/dataverse/TemplatePage.java index fff520fd259..44070dcbb41 100644 --- a/src/main/java/edu/harvard/iq/dataverse/TemplatePage.java +++ b/src/main/java/edu/harvard/iq/dataverse/TemplatePage.java @@ -148,6 +148,7 @@ public String init() { editMode = TemplatePage.EditMode.CREATE; template = new Template(this.dataverse, settingsWrapper.getSystemMetadataBlocks()); TermsOfUseAndAccess terms = new TermsOfUseAndAccess(); + terms.setFileAccessRequest(true); terms.setTemplate(template); terms.setLicense(licenseServiceBean.getDefault()); template.setTermsOfUseAndAccess(terms); diff --git a/src/main/webapp/dataset-license-terms.xhtml b/src/main/webapp/dataset-license-terms.xhtml index b998442f2aa..c54d94442ea 100644 --- a/src/main/webapp/dataset-license-terms.xhtml +++ b/src/main/webapp/dataset-license-terms.xhtml @@ -7,6 +7,13 @@ xmlns:o="http://omnifaces.org/ui" xmlns:jsf="http://xmlns.jcp.org/jsf"> + +
    @@ -238,17 +245,13 @@
    -
    +
     
    -
    +
    @@ -267,7 +270,7 @@
    -
    +
    -
    +
    -

    +

    -

    +

    From 29e56bbe5a3e0ec8f80d98e8d858665d6b63250d Mon Sep 17 00:00:00 2001 From: Leonid Andreev Date: Thu, 28 Sep 2023 17:17:16 -0400 Subject: [PATCH 321/396] saving temp. changes --- .../edu/harvard/iq/dataverse/EditDatafilesPage.java | 10 ++-------- .../engine/command/impl/CreateNewDataFilesCommand.java | 6 +++++- 2 files changed, 7 insertions(+), 9 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/EditDatafilesPage.java b/src/main/java/edu/harvard/iq/dataverse/EditDatafilesPage.java index 4144112f997..7b8f0355f4c 100644 --- a/src/main/java/edu/harvard/iq/dataverse/EditDatafilesPage.java +++ b/src/main/java/edu/harvard/iq/dataverse/EditDatafilesPage.java @@ -2187,16 +2187,10 @@ public void handleExternalUpload() { List datafiles = new ArrayList<>(); // ----------------------------------------------------------- - // Send it through the ingest service + // Execute the CreateNewDataFiles command: // ----------------------------------------------------------- try { - - // Note: A single uploaded file may produce multiple datafiles - - // for example, multiple files can be extracted from an uncompressed - // zip file. 
- //datafiles = ingestService.createDataFiles(workingVersion, dropBoxStream, fileName, "application/octet-stream"); - ///CreateDataFileResult createDataFilesResult = FileUtil.createDataFiles(workingVersion, null, fileName, contentType, fullStorageIdentifier, checksumValue, checksumType, systemConfig); - + Command cmd = new CreateNewDataFilesCommand(dvRequestService.getDataverseRequest(), workingVersion, null, fileName, contentType, fullStorageIdentifier, userStorageQuota, checksumValue, checksumType); CreateDataFileResult createDataFilesResult = commandEngine.submit(cmd); datafiles = createDataFilesResult.getDataFiles(); diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateNewDataFilesCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateNewDataFilesCommand.java index e7fc30bdd80..6fa3c721c38 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateNewDataFilesCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateNewDataFilesCommand.java @@ -648,15 +648,19 @@ public CreateDataFileResult execute(CommandContext ctxt) throws CommandException // if we were unable to unpack an uploaded file, etc.), we'll just // create and return a single DataFile: File newFile = null; + long fileSize = -1; + if (tempFile != null) { newFile = tempFile.toFile(); + fileSize = newFile.length(); + } else { + // @todo! What do we do if this is direct upload?? where does the size come from? 
} // We have already checked that this file does not exceed the individual size limit; // but if we are processing it as is, as a single file, we need to check if // its size does not go beyond the allocated storage quota (if specified): - long fileSize = newFile.length(); if (storageQuotaLimit != null && fileSize > storageQuotaLimit) { try { From 7ab736c05a2aac0b7137861d46a6224ed561e228 Mon Sep 17 00:00:00 2001 From: Leonid Andreev Date: Fri, 29 Sep 2023 15:29:46 -0400 Subject: [PATCH 322/396] Fixes direct upload from the UI. (#9361) --- .../iq/dataverse/EditDatafilesPage.java | 21 +++++- .../impl/CreateNewDataFilesCommand.java | 66 +++++++++++-------- 2 files changed, 57 insertions(+), 30 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/EditDatafilesPage.java b/src/main/java/edu/harvard/iq/dataverse/EditDatafilesPage.java index 7b8f0355f4c..a942830b19e 100644 --- a/src/main/java/edu/harvard/iq/dataverse/EditDatafilesPage.java +++ b/src/main/java/edu/harvard/iq/dataverse/EditDatafilesPage.java @@ -2068,7 +2068,7 @@ public void handleFileUpload(FileUploadEvent event) throws IOException { // dataset that does not yet exist in the database. 
We must // use the version of the Create New Files constructor that takes // the parent Dataverse as the extra argument: - cmd = new CreateNewDataFilesCommand(dvRequestService.getDataverseRequest(), workingVersion, uFile.getInputStream(), uFile.getFileName(), uFile.getContentType(), null, userStorageQuota, null, null, workingVersion.getDataset().getOwner()); + cmd = new CreateNewDataFilesCommand(dvRequestService.getDataverseRequest(), workingVersion, uFile.getInputStream(), uFile.getFileName(), uFile.getContentType(), null, userStorageQuota, null, null, null, workingVersion.getDataset().getOwner()); } else { cmd = new CreateNewDataFilesCommand(dvRequestService.getDataverseRequest(), workingVersion, uFile.getInputStream(), uFile.getFileName(), uFile.getContentType(), null, userStorageQuota, null); } @@ -2168,6 +2168,11 @@ public void handleExternalUpload() { - Max size NOT specified in db: default is unlimited - Max size specified in db: check too make sure file is within limits // ---------------------------- */ + /** + * @todo: this size check is probably redundant here, since the new + * CreateNewFilesCommand is going to perform it (and the quota + * checks too, if enabled + */ if ((!this.isUnlimitedUploadFileSize()) && (fileSize > this.getMaxFileUploadSizeInBytes())) { String warningMessage = "Uploaded file \"" + fileName + "\" exceeded the limit of " + fileSize + " bytes and was not uploaded."; sio.delete(); @@ -2189,9 +2194,21 @@ public void handleExternalUpload() { // ----------------------------------------------------------- // Execute the CreateNewDataFiles command: // ----------------------------------------------------------- + + Dataverse parent = null; + + if (mode == FileEditMode.CREATE) { + // This is a file upload in the context of creating a brand new + // dataset that does not yet exist in the database. We must + // pass the parent Dataverse to the CreateNewFiles command + // constructor. 
The RequiredPermission on the command in this + // scenario = Permission.AddDataset on the parent dataverse. + parent = workingVersion.getDataset().getOwner(); + } + try { - Command cmd = new CreateNewDataFilesCommand(dvRequestService.getDataverseRequest(), workingVersion, null, fileName, contentType, fullStorageIdentifier, userStorageQuota, checksumValue, checksumType); + Command cmd = new CreateNewDataFilesCommand(dvRequestService.getDataverseRequest(), workingVersion, null, fileName, contentType, fullStorageIdentifier, userStorageQuota, checksumValue, checksumType, fileSize, parent); CreateDataFileResult createDataFilesResult = commandEngine.submit(cmd); datafiles = createDataFilesResult.getDataFiles(); Optional.ofNullable(editDataFilesPageHelper.getHtmlErrorMessage(createDataFilesResult)).ifPresent(errorMessage -> errorMessages.add(errorMessage)); diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateNewDataFilesCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateNewDataFilesCommand.java index 6fa3c721c38..ac701da1be9 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateNewDataFilesCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateNewDataFilesCommand.java @@ -74,34 +74,29 @@ public class CreateNewDataFilesCommand extends AbstractCommand storageQuotaLimit) { - try { - tempFile.toFile().delete(); - } catch (Exception ex) { - // ignore - but log a warning - logger.warning("Could not remove temp file " + tempFile.getFileName()); + if (newFile != null) { + // Remove the temp. file, if this is a non-direct upload. + // If this is a direct upload, it will be a responsibility of the + // component calling the command to remove the file that may have + // already been saved in the S3 volume. 
+ try { + newFile.delete(); + } catch (Exception ex) { + // ignore - but log a warning + logger.warning("Could not remove temp file " + tempFile.getFileName()); + } } throw new CommandExecutionException(MessageFormat.format(BundleUtil.getStringFromBundle("file.addreplace.error.quota_exceeded"), bytesToHumanReadable(fileSize), bytesToHumanReadable(storageQuotaLimit)), this); } DataFile datafile = FileUtil.createSingleDataFile(version, newFile, newStorageIdentifier, fileName, finalType, newCheckSumType, newCheckSum); - File f = null; - if (tempFile != null) { - f = tempFile.toFile(); - } - if (datafile != null && ((f != null) || (newStorageIdentifier != null))) { + + if (datafile != null && ((newFile != null) || (newStorageIdentifier != null))) { if (warningMessage != null) { createIngestFailureReport(datafile, warningMessage); datafile.SetIngestProblem(); } + if (datafile.getFilesize() < 0) { + datafile.setFilesize(fileSize); + } datafiles.add(datafile); // Update quota (may not be necessary in the context of direct upload - ?) 
@@ -701,7 +711,7 @@ public Map> getRequiredPermissions() { ret.put("", new HashSet<>()); - if (dataverse != null) { + if (parentDataverse != null) { // The command is called in the context of uploading files on // create of a new dataset ret.get("").add(Permission.AddDataset); From 7881919ede378886fe6283086a02a4dfea8989a8 Mon Sep 17 00:00:00 2001 From: qqmyers Date: Fri, 29 Sep 2023 16:07:09 -0400 Subject: [PATCH 323/396] Update native-api.rst misplaced quote marks --- doc/sphinx-guides/source/api/native-api.rst | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/doc/sphinx-guides/source/api/native-api.rst b/doc/sphinx-guides/source/api/native-api.rst index 176cb36e288..b9b0bd32aec 100644 --- a/doc/sphinx-guides/source/api/native-api.rst +++ b/doc/sphinx-guides/source/api/native-api.rst @@ -2283,7 +2283,7 @@ To set the behavior for this dataset: export SERVER_URL=https://demo.dataverse.org export PERSISTENT_IDENTIFIER=doi:10.5072/FK2/YD5QDG - curl -X PUT "-H:X-Dataverse-key:$API_TOKEN" -H Content-type:application/json -d true "$SERVER_URL/api/datasets/:persistentId/guestbookEntryAtRequest?persistentId=$PERSISTENT_IDENTIFIER" + curl -X PUT -H "X-Dataverse-key:$API_TOKEN" -H Content-type:application/json -d true "$SERVER_URL/api/datasets/:persistentId/guestbookEntryAtRequest?persistentId=$PERSISTENT_IDENTIFIER" This example uses true to set the behavior to guestbook at request. Note that this call will return a 403/Forbidden response if guestbook at request functionality is not enabled for this Dataverse instance. 
@@ -2296,7 +2296,7 @@ The API can also be used to reset the dataset to use the default/inherited value export SERVER_URL=https://demo.dataverse.org export PERSISTENT_IDENTIFIER=doi:10.5072/FK2/YD5QDG - curl -X DELETE "-H:X-Dataverse-key:$API_TOKEN" -H Content-type:application/json "$SERVER_URL/api/datasets/:persistentId/guestbookEntryAtRequest?persistentId=$PERSISTENT_IDENTIFIER" + curl -X DELETE -H "X-Dataverse-key:$API_TOKEN" -H Content-type:application/json "$SERVER_URL/api/datasets/:persistentId/guestbookEntryAtRequest?persistentId=$PERSISTENT_IDENTIFIER" From 907fd4024c8df2218764fd0902d1242a37726f7e Mon Sep 17 00:00:00 2001 From: GPortas Date: Mon, 2 Oct 2023 10:48:36 +0100 Subject: [PATCH 324/396] Changed: using query-based implementation for files download size --- .../DatasetVersionFilesServiceBean.java | 57 +++++++++++++++++++ .../harvard/iq/dataverse/api/Datasets.java | 12 +++- .../harvard/iq/dataverse/api/DatasetsIT.java | 30 +++++++--- .../edu/harvard/iq/dataverse/api/UtilIT.java | 4 +- 4 files changed, 89 insertions(+), 14 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetVersionFilesServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DatasetVersionFilesServiceBean.java index a547a216ad5..66e0ec5b5fe 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetVersionFilesServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetVersionFilesServiceBean.java @@ -1,6 +1,7 @@ package edu.harvard.iq.dataverse; import edu.harvard.iq.dataverse.QDataFileCategory; +import edu.harvard.iq.dataverse.QDataTable; import edu.harvard.iq.dataverse.QDvObject; import edu.harvard.iq.dataverse.QEmbargo; import edu.harvard.iq.dataverse.QFileMetadata; @@ -36,6 +37,7 @@ public class DatasetVersionFilesServiceBean implements Serializable { private final QFileMetadata fileMetadata = QFileMetadata.fileMetadata; private final QDvObject dvObject = QDvObject.dvObject; private final QDataFileCategory dataFileCategory = 
QDataFileCategory.dataFileCategory; + private final QDataTable dataTable = QDataTable.dataTable; /** * Different criteria to sort the results of FileMetadata queries used in {@link DatasetVersionFilesServiceBean#getFileMetadatas} @@ -51,6 +53,19 @@ public enum DataFileAccessStatus { Public, Restricted, EmbargoedThenRestricted, EmbargoedThenPublic } + /** + * Mode to base the search in {@link DatasetVersionFilesServiceBean#getFilesDownloadSize(DatasetVersion, FileDownloadSizeMode)} + *

    + * All: Includes both archival and original sizes for tabular files + * Archival: Includes only the archival size for tabular files + * Original: Includes only the original size for tabular files + *

    + * All the modes include archival sizes for non-tabular files + */ + public enum FileDownloadSizeMode { + All, Original, Archival + } + /** * Given a DatasetVersion, returns its total file metadata count * @@ -159,6 +174,23 @@ public List getFileMetadatas(DatasetVersion datasetVersion, Intege return baseQuery.fetch(); } + /** + * Returns the total download size of all files for a particular DatasetVersion + * + * @param datasetVersion the DatasetVersion to access + * @param mode a FileDownloadSizeMode to base the search on + * @return long value of total file download size + */ + public long getFilesDownloadSize(DatasetVersion datasetVersion, FileDownloadSizeMode mode) { + return switch (mode) { + case All -> + Long.sum(getOriginalTabularFilesSize(datasetVersion), getArchivalFilesSize(datasetVersion, false)); + case Original -> + Long.sum(getOriginalTabularFilesSize(datasetVersion), getArchivalFilesSize(datasetVersion, true)); + case Archival -> getArchivalFilesSize(datasetVersion, false); + }; + } + private void addAccessStatusCountToTotal(DatasetVersion datasetVersion, Map totalCounts, DataFileAccessStatus dataFileAccessStatus) { long fileMetadataCount = getFileMetadataCountByAccessStatus(datasetVersion, dataFileAccessStatus); if (fileMetadataCount > 0) { @@ -230,4 +262,29 @@ private void applyOrderCriteriaToGetFileMetadatasQuery(JPAQuery qu break; } } + + private long getOriginalTabularFilesSize(DatasetVersion datasetVersion) { + JPAQueryFactory queryFactory = new JPAQueryFactory(em); + Long result = queryFactory + .from(fileMetadata) + .where(fileMetadata.datasetVersion.id.eq(datasetVersion.getId())) + .from(dataTable) + .where(fileMetadata.dataFile.dataTables.isNotEmpty().and(dataTable.dataFile.eq(fileMetadata.dataFile))) + .select(dataTable.originalFileSize.sum()).fetchFirst(); + return (result == null) ? 
0 : result; + } + + private long getArchivalFilesSize(DatasetVersion datasetVersion, boolean ignoreTabular) { + JPAQueryFactory queryFactory = new JPAQueryFactory(em); + JPAQuery baseQuery = queryFactory + .from(fileMetadata) + .where(fileMetadata.datasetVersion.id.eq(datasetVersion.getId())); + Long result; + if (ignoreTabular) { + result = baseQuery.where(fileMetadata.dataFile.dataTables.isEmpty()).select(fileMetadata.dataFile.filesize.sum()).fetchFirst(); + } else { + result = baseQuery.select(fileMetadata.dataFile.filesize.sum()).fetchFirst(); + } + return (result == null) ? 0 : result; + } } diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java index 981cbced11e..80a2dac9568 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java @@ -2962,13 +2962,19 @@ public Response getStorageSize(@Context ContainerRequestContext crc, @PathParam( public Response getDownloadSize(@Context ContainerRequestContext crc, @PathParam("identifier") String dvIdtf, @PathParam("versionId") String version, - @QueryParam("ignoreOriginalTabularSize") boolean ignoreOriginalTabularSize, + @QueryParam("mode") String mode, @Context UriInfo uriInfo, @Context HttpHeaders headers) { + return response(req -> { + DatasetVersionFilesServiceBean.FileDownloadSizeMode fileDownloadSizeMode; + try { + fileDownloadSizeMode = mode != null ? DatasetVersionFilesServiceBean.FileDownloadSizeMode.valueOf(mode) : DatasetVersionFilesServiceBean.FileDownloadSizeMode.All; + } catch (IllegalArgumentException e) { + return error(Response.Status.BAD_REQUEST, "Invalid mode: " + mode); + } DatasetVersion datasetVersion = getDatasetVersionOrDie(req, version, findDatasetOrDie(dvIdtf), uriInfo, headers); - Long datasetStorageSize = ignoreOriginalTabularSize ? 
DatasetUtil.getDownloadSizeNumeric(datasetVersion, false) - : execCommand(new GetDatasetStorageSizeCommand(req, findDatasetOrDie(dvIdtf), false, GetDatasetStorageSizeCommand.Mode.DOWNLOAD, datasetVersion)); + long datasetStorageSize = datasetVersionFilesServiceBean.getFilesDownloadSize(datasetVersion, fileDownloadSizeMode); String message = MessageFormat.format(BundleUtil.getStringFromBundle("datasets.api.datasize.download"), datasetStorageSize); JsonObjectBuilder jsonObjectBuilder = Json.createObjectBuilder(); jsonObjectBuilder.add("message", message); diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java index 580a1edb6f2..189cf3a6f5a 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java @@ -3632,7 +3632,7 @@ public void deaccessionDataset() { } @Test - public void getDownloadSize() throws IOException { + public void getDownloadSize() throws IOException, InterruptedException { Response createUser = UtilIT.createRandomUser(); createUser.then().assertThat().statusCode(OK.getStatusCode()); String apiToken = UtilIT.getApiTokenFromResponse(createUser); @@ -3658,7 +3658,8 @@ public void getDownloadSize() throws IOException { int expectedTextFilesStorageSize = testFileSize1 + testFileSize2; - Response getDownloadSizeResponse = UtilIT.getDownloadSize(datasetId, DS_VERSION_LATEST, false, apiToken); + // Get the total size when there are no tabular files + Response getDownloadSizeResponse = UtilIT.getDownloadSize(datasetId, DS_VERSION_LATEST, DatasetVersionFilesServiceBean.FileDownloadSizeMode.All.toString(), apiToken); getDownloadSizeResponse.then().assertThat().statusCode(OK.getStatusCode()) .body("data.storageSize", equalTo(expectedTextFilesStorageSize)); @@ -3670,20 +3671,31 @@ public void getDownloadSize() throws IOException { // Get the original tabular file size int tabularOriginalSize = 
Integer.parseInt(uploadTabularFileResponse.getBody().jsonPath().getString("data.files[0].dataFile.filesize")); - // Get the size ignoring the original tabular file sizes - getDownloadSizeResponse = UtilIT.getDownloadSize(datasetId, DS_VERSION_LATEST, true, apiToken); + // Ensure tabular file is ingested + Thread.sleep(2000); + + // Get the total size ignoring the original tabular file sizes + getDownloadSizeResponse = UtilIT.getDownloadSize(datasetId, DS_VERSION_LATEST, DatasetVersionFilesServiceBean.FileDownloadSizeMode.Archival.toString(), apiToken); getDownloadSizeResponse.then().assertThat().statusCode(OK.getStatusCode()); int actualSizeIgnoringOriginalTabularSizes = Integer.parseInt(getDownloadSizeResponse.getBody().jsonPath().getString("data.storageSize")); + // Assert that the size has been incremented with the last uploaded file assertTrue(actualSizeIgnoringOriginalTabularSizes > expectedTextFilesStorageSize); - // Get the size including the original tabular file sizes - int tabularProcessedSize = actualSizeIgnoringOriginalTabularSizes - expectedTextFilesStorageSize; - int expectedSizeIncludingOriginalTabularSizes = tabularOriginalSize + tabularProcessedSize + expectedTextFilesStorageSize; + // Get the total size including only original sizes and ignoring archival sizes for tabular files + int expectedSizeIncludingOnlyOriginalForTabular = tabularOriginalSize + expectedTextFilesStorageSize; + + getDownloadSizeResponse = UtilIT.getDownloadSize(datasetId, DS_VERSION_LATEST, DatasetVersionFilesServiceBean.FileDownloadSizeMode.Original.toString(), apiToken); + getDownloadSizeResponse.then().assertThat().statusCode(OK.getStatusCode()) + .body("data.storageSize", equalTo(expectedSizeIncludingOnlyOriginalForTabular)); + + // Get the total size including both the original and archival tabular file sizes + int tabularArchivalSize = actualSizeIgnoringOriginalTabularSizes - expectedTextFilesStorageSize; + int expectedSizeIncludingAllSizes = tabularArchivalSize + 
tabularOriginalSize + expectedTextFilesStorageSize; - getDownloadSizeResponse = UtilIT.getDownloadSize(datasetId, DS_VERSION_LATEST, false, apiToken); + getDownloadSizeResponse = UtilIT.getDownloadSize(datasetId, DS_VERSION_LATEST, DatasetVersionFilesServiceBean.FileDownloadSizeMode.All.toString(), apiToken); getDownloadSizeResponse.then().assertThat().statusCode(OK.getStatusCode()) - .body("data.storageSize", equalTo(expectedSizeIncludingOriginalTabularSizes)); + .body("data.storageSize", equalTo(expectedSizeIncludingAllSizes)); } } diff --git a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java index ecf26bd26ae..f9f3dc9be8d 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java @@ -3410,10 +3410,10 @@ static Response deaccessionDataset(Integer datasetId, String version, String api .put("/api/datasets/" + datasetId + "/versions/" + version + "/deaccession"); } - static Response getDownloadSize(Integer datasetId, String version, boolean ignoreOriginalTabularSize, String apiToken) { + static Response getDownloadSize(Integer datasetId, String version, String mode, String apiToken) { return given() .header(API_TOKEN_HTTP_HEADER, apiToken) - .queryParam("ignoreOriginalTabularSize", ignoreOriginalTabularSize) + .queryParam("mode", mode) .get("/api/datasets/" + datasetId + "/versions/" + version + "/downloadsize"); } } From a5c32bd1b11f4385926f9abc53578e6b48c05adc Mon Sep 17 00:00:00 2001 From: GPortas Date: Mon, 2 Oct 2023 10:53:45 +0100 Subject: [PATCH 325/396] Added: error case to getDownloadSize IT --- src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java index 189cf3a6f5a..ee3355096b8 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java 
+++ b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java @@ -3697,5 +3697,11 @@ public void getDownloadSize() throws IOException, InterruptedException { getDownloadSizeResponse = UtilIT.getDownloadSize(datasetId, DS_VERSION_LATEST, DatasetVersionFilesServiceBean.FileDownloadSizeMode.All.toString(), apiToken); getDownloadSizeResponse.then().assertThat().statusCode(OK.getStatusCode()) .body("data.storageSize", equalTo(expectedSizeIncludingAllSizes)); + + // Get the total size sending invalid file download size mode + String invalidMode = "invalidMode"; + getDownloadSizeResponse = UtilIT.getDownloadSize(datasetId, DS_VERSION_LATEST, invalidMode, apiToken); + getDownloadSizeResponse.then().assertThat().statusCode(BAD_REQUEST.getStatusCode()) + .body("message", equalTo("Invalid mode: " + invalidMode)); } } From 131cd8f83473e9919e871723551eb441b6f27c3e Mon Sep 17 00:00:00 2001 From: GPortas Date: Mon, 2 Oct 2023 11:22:44 +0100 Subject: [PATCH 326/396] Added: multiple tab files test case for getDownloadSize IT --- .../harvard/iq/dataverse/api/DatasetsIT.java | 18 ++++++++++++++++++ 1 file changed, 18 insertions(+) diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java index ee3355096b8..829c19c6440 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java @@ -3703,5 +3703,23 @@ public void getDownloadSize() throws IOException, InterruptedException { getDownloadSizeResponse = UtilIT.getDownloadSize(datasetId, DS_VERSION_LATEST, invalidMode, apiToken); getDownloadSizeResponse.then().assertThat().statusCode(BAD_REQUEST.getStatusCode()) .body("message", equalTo("Invalid mode: " + invalidMode)); + + // Upload second test tabular file (same source as before) + uploadTabularFileResponse = UtilIT.uploadFileViaNative(Integer.toString(datasetId), pathToTabularTestFile, Json.createObjectBuilder().build(), apiToken); + 
uploadTabularFileResponse.then().assertThat().statusCode(OK.getStatusCode()); + + // Get the total size including only original sizes and ignoring archival sizes for tabular files + expectedSizeIncludingOnlyOriginalForTabular = tabularOriginalSize + expectedSizeIncludingOnlyOriginalForTabular; + + getDownloadSizeResponse = UtilIT.getDownloadSize(datasetId, DS_VERSION_LATEST, DatasetVersionFilesServiceBean.FileDownloadSizeMode.Original.toString(), apiToken); + getDownloadSizeResponse.then().assertThat().statusCode(OK.getStatusCode()) + .body("data.storageSize", equalTo(expectedSizeIncludingOnlyOriginalForTabular)); + + // Get the total size including both the original and archival tabular file sizes + expectedSizeIncludingAllSizes = tabularArchivalSize + tabularOriginalSize + expectedSizeIncludingAllSizes; + + getDownloadSizeResponse = UtilIT.getDownloadSize(datasetId, DS_VERSION_LATEST, DatasetVersionFilesServiceBean.FileDownloadSizeMode.All.toString(), apiToken); + getDownloadSizeResponse.then().assertThat().statusCode(OK.getStatusCode()) + .body("data.storageSize", equalTo(expectedSizeIncludingAllSizes)); } } From d1d5eed683dfc8d04cdf832d5c40d4947821b8c3 Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Mon, 2 Oct 2023 12:34:33 +0200 Subject: [PATCH 327/396] style(oidc): apply language fixes by @pdurbin from review #9268 Co-authored-by: Philip Durbin --- doc/release-notes/9268-8349-oidc-improvements.md | 15 ++++++++++++++- doc/sphinx-guides/source/installation/oidc.rst | 10 +++++----- 2 files changed, 19 insertions(+), 6 deletions(-) diff --git a/doc/release-notes/9268-8349-oidc-improvements.md b/doc/release-notes/9268-8349-oidc-improvements.md index cb0a9685c69..34e45a98101 100644 --- a/doc/release-notes/9268-8349-oidc-improvements.md +++ b/doc/release-notes/9268-8349-oidc-improvements.md @@ -5,7 +5,7 @@ With this release it is possible to provision a single OIDC-based authentication provider by using MicroProfile Config instead of or in addition to the 
classic Admin API provisioning. -If you are using an external OIDC provider component as identity management system and/or broker +If you are using an external OIDC provider component as an identity management system and/or broker to other authentication providers such as Google, eduGain SAML and so on, this might make your life easier during instance setups and reconfiguration. You no longer need to generate the necessary JSON file. @@ -26,3 +26,16 @@ The testing and development Keycloak realm has been updated with more users and The support for setting JVM options during testing has been improved for developers. You now may add the `@JvmSetting` annotation to classes (also inner classes) and reference factory methods for values. This improvement is also paving the way to enable manipulating JVM options during end-to-end tests on remote ends. + +## New Configuration Options + +- dataverse.auth.oidc.enabled +- dataverse.auth.oidc.client-id +- dataverse.auth.oidc.client-secret +- dataverse.auth.oidc.auth-server-url +- dataverse.auth.oidc.pkce.enabled +- dataverse.auth.oidc.pkce.method +- dataverse.auth.oidc.title +- dataverse.auth.oidc.subtitle +- dataverse.auth.oidc.pkce.max-cache-size +- dataverse.auth.oidc.pkce.max-cache-age diff --git a/doc/sphinx-guides/source/installation/oidc.rst b/doc/sphinx-guides/source/installation/oidc.rst index e036e9c8470..4a0c77006e0 100644 --- a/doc/sphinx-guides/source/installation/oidc.rst +++ b/doc/sphinx-guides/source/installation/oidc.rst @@ -78,14 +78,14 @@ Enabling PKCE Security Many providers these days support or even require the usage of `PKCE `_ to safeguard against some attacks and enable public clients that cannot have a secure secret to still use OpenID Connect (or OAuth2). -The Dataverse built OIDC client can be enabled to use PKCE and which method to use when creating the code challenge. 
+The Dataverse-built OIDC client can be configured to use PKCE and the method to use when creating the code challenge can be specified. See also `this explanation of the flow `_ for details on how this works. As we are using the `Nimbus SDK `_ as our client -library, we support the standard ``PLAIN`` and ``S256`` code challenge methods. "SHA-256 method" is the default +library, we support the standard ``PLAIN`` and ``S256`` (SHA-256) code challenge methods. "SHA-256 method" is the default as recommend in `RFC7636 `_. If your provider needs some -other method (unlikely), please open an issue. +other method, please open an issue. The provisioning sections below contain in the example the parameters you may use to configure PKCE. @@ -169,11 +169,11 @@ The following options are available: - N - ``OpenID Connect`` * - ``dataverse.auth.oidc.pkce.max-cache-size`` - - Tune the maximum size of all OIDC providers' verifier cache (= number of outstanding PKCE-enabled auth responses). + - Tune the maximum size of all OIDC providers' verifier cache (the number of outstanding PKCE-enabled auth responses). - N - 10000 * - ``dataverse.auth.oidc.pkce.max-cache-age`` - - Tune the maximum age of all OIDC providers' verifier cache entries. Default is 5 minutes, equivalent to lifetime + - Tune the maximum age, in seconds, of all OIDC providers' verifier cache entries. Default is 5 minutes, equivalent to lifetime of many OIDC access tokens. 
- N - 300 \ No newline at end of file From 42d181216fd042d378768763209805f77e659149 Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Mon, 2 Oct 2023 12:53:20 +0200 Subject: [PATCH 328/396] build(oidc): upgrade to Keycloak 22 in OIDC integration test #9268 --- pom.xml | 2 +- .../oauth2/oidc/OIDCAuthenticationProviderFactoryIT.java | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/pom.xml b/pom.xml index 13a834e9a67..e70b723cad5 100644 --- a/pom.xml +++ b/pom.xml @@ -609,7 +609,7 @@ com.github.dasniko testcontainers-keycloak - 2.5.0 + 3.0.0 test 0.43.4 - - - 5.0.0 diff --git a/pom.xml b/pom.xml index 909e9ee9b80..e70b723cad5 100644 --- a/pom.xml +++ b/pom.xml @@ -252,20 +252,6 @@ expressly provided - - - com.querydsl - querydsl-apt - ${querydsl.version} - jakarta - provided - - - com.querydsl - querydsl-jpa - ${querydsl.version} - jakarta - commons-io diff --git a/src/main/java/edu/harvard/iq/dataverse/DataFileTag.java b/src/main/java/edu/harvard/iq/dataverse/DataFileTag.java index 351c4032939..f4f66d3c874 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DataFileTag.java +++ b/src/main/java/edu/harvard/iq/dataverse/DataFileTag.java @@ -58,7 +58,7 @@ public enum TagType {Survey, TimeSeries, Panel, Event, Genomics, Network, Geospa private static final Map TagTypeToLabels = new HashMap<>(); - public static final Map TagLabelToTypes = new HashMap<>(); + private static final Map TagLabelToTypes = new HashMap<>(); static { diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetVersionFilesServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DatasetVersionFilesServiceBean.java deleted file mode 100644 index 6006d937100..00000000000 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetVersionFilesServiceBean.java +++ /dev/null @@ -1,241 +0,0 @@ -package edu.harvard.iq.dataverse; - -import edu.harvard.iq.dataverse.QDataFileCategory; -import edu.harvard.iq.dataverse.QDataFileTag; -import edu.harvard.iq.dataverse.QDvObject; -import 
edu.harvard.iq.dataverse.QEmbargo; -import edu.harvard.iq.dataverse.QFileMetadata; - -import com.querydsl.core.Tuple; -import com.querydsl.core.types.dsl.BooleanExpression; -import com.querydsl.core.types.dsl.CaseBuilder; -import com.querydsl.core.types.dsl.DateExpression; -import com.querydsl.core.types.dsl.DateTimeExpression; - -import com.querydsl.jpa.impl.JPAQuery; -import com.querydsl.jpa.impl.JPAQueryFactory; - -import jakarta.ejb.Stateless; -import jakarta.inject.Named; -import jakarta.persistence.EntityManager; -import jakarta.persistence.PersistenceContext; - -import java.io.Serializable; -import java.sql.Timestamp; -import java.time.LocalDate; -import java.util.HashMap; -import java.util.List; -import java.util.Map; - -import static edu.harvard.iq.dataverse.DataFileTag.TagLabelToTypes; - -@Stateless -@Named -public class DatasetVersionFilesServiceBean implements Serializable { - - @PersistenceContext(unitName = "VDCNet-ejbPU") - private EntityManager em; - - private final QFileMetadata fileMetadata = QFileMetadata.fileMetadata; - private final QDvObject dvObject = QDvObject.dvObject; - private final QDataFileCategory dataFileCategory = QDataFileCategory.dataFileCategory; - private final QDataFileTag dataFileTag = QDataFileTag.dataFileTag; - - /** - * Different criteria to sort the results of FileMetadata queries used in {@link DatasetVersionFilesServiceBean#getFileMetadatas} - */ - public enum FileMetadatasOrderCriteria { - NameAZ, NameZA, Newest, Oldest, Size, Type - } - - /** - * Status of the particular DataFile based on active embargoes and restriction state used in {@link DatasetVersionFilesServiceBean#getFileMetadatas} - */ - public enum DataFileAccessStatus { - Public, Restricted, EmbargoedThenRestricted, EmbargoedThenPublic - } - - /** - * Given a DatasetVersion, returns its total file metadata count - * - * @param datasetVersion the DatasetVersion to access - * @return long value of total file metadata count - */ - public long 
getFileMetadataCount(DatasetVersion datasetVersion) { - JPAQueryFactory queryFactory = new JPAQueryFactory(em); - return queryFactory.selectFrom(fileMetadata).where(fileMetadata.datasetVersion.id.eq(datasetVersion.getId())).stream().count(); - } - - /** - * Given a DatasetVersion, returns its file metadata count per content type - * - * @param datasetVersion the DatasetVersion to access - * @return Map of file metadata counts per content type - */ - public Map getFileMetadataCountPerContentType(DatasetVersion datasetVersion) { - JPAQueryFactory queryFactory = new JPAQueryFactory(em); - List contentTypeOccurrences = queryFactory - .select(fileMetadata.dataFile.contentType, fileMetadata.count()) - .from(fileMetadata) - .where(fileMetadata.datasetVersion.id.eq(datasetVersion.getId())) - .groupBy(fileMetadata.dataFile.contentType).fetch(); - Map result = new HashMap<>(); - for (Tuple occurrence : contentTypeOccurrences) { - result.put(occurrence.get(fileMetadata.dataFile.contentType), occurrence.get(fileMetadata.count())); - } - return result; - } - - /** - * Given a DatasetVersion, returns its file metadata count per category name - * - * @param datasetVersion the DatasetVersion to access - * @return Map of file metadata counts per category name - */ - public Map getFileMetadataCountPerCategoryName(DatasetVersion datasetVersion) { - JPAQueryFactory queryFactory = new JPAQueryFactory(em); - List categoryNameOccurrences = queryFactory - .select(dataFileCategory.name, fileMetadata.count()) - .from(dataFileCategory, fileMetadata) - .where(fileMetadata.datasetVersion.id.eq(datasetVersion.getId()).and(fileMetadata.fileCategories.contains(dataFileCategory))) - .groupBy(dataFileCategory.name).fetch(); - Map result = new HashMap<>(); - for (Tuple occurrence : categoryNameOccurrences) { - result.put(occurrence.get(dataFileCategory.name), occurrence.get(fileMetadata.count())); - } - return result; - } - - /** - * Given a DatasetVersion, returns its file metadata count per 
DataFileAccessStatus - * - * @param datasetVersion the DatasetVersion to access - * @return Map of file metadata counts per DataFileAccessStatus - */ - public Map getFileMetadataCountPerAccessStatus(DatasetVersion datasetVersion) { - Map allCounts = new HashMap<>(); - addAccessStatusCountToTotal(datasetVersion, allCounts, DataFileAccessStatus.Public); - addAccessStatusCountToTotal(datasetVersion, allCounts, DataFileAccessStatus.Restricted); - addAccessStatusCountToTotal(datasetVersion, allCounts, DataFileAccessStatus.EmbargoedThenPublic); - addAccessStatusCountToTotal(datasetVersion, allCounts, DataFileAccessStatus.EmbargoedThenRestricted); - return allCounts; - } - - /** - * Returns a FileMetadata list of files in the specified DatasetVersion - * - * @param datasetVersion the DatasetVersion to access - * @param limit for pagination, can be null - * @param offset for pagination, can be null - * @param contentType for retrieving only files with this content type - * @param accessStatus for retrieving only files with this DataFileAccessStatus - * @param categoryName for retrieving only files categorized with this category name - * @param tabularTagName for retrieving only files categorized with this tabular tag name - * @param searchText for retrieving only files that contain the specified text within their labels or descriptions - * @param orderCriteria a FileMetadatasOrderCriteria to order the results - * @return a FileMetadata list from the specified DatasetVersion - */ - public List getFileMetadatas(DatasetVersion datasetVersion, Integer limit, Integer offset, String contentType, DataFileAccessStatus accessStatus, String categoryName, String tabularTagName, String searchText, FileMetadatasOrderCriteria orderCriteria) { - JPAQuery baseQuery = createGetFileMetadatasBaseQuery(datasetVersion, orderCriteria); - - if (contentType != null) { - baseQuery.where(fileMetadata.dataFile.contentType.eq(contentType)); - } - if (accessStatus != null) { - 
baseQuery.where(createGetFileMetadatasAccessStatusExpression(accessStatus)); - } - if (categoryName != null) { - baseQuery.from(dataFileCategory).where(dataFileCategory.name.eq(categoryName).and(fileMetadata.fileCategories.contains(dataFileCategory))); - } - if (tabularTagName != null) { - baseQuery.from(dataFileTag).where(dataFileTag.type.eq(TagLabelToTypes.get(tabularTagName)).and(fileMetadata.dataFile.dataFileTags.contains(dataFileTag))); - } - if (searchText != null && !searchText.isEmpty()) { - searchText = searchText.trim().toLowerCase(); - baseQuery.where(fileMetadata.label.lower().contains(searchText).or(fileMetadata.description.lower().contains(searchText))); - } - - applyOrderCriteriaToGetFileMetadatasQuery(baseQuery, orderCriteria); - - if (limit != null) { - baseQuery.limit(limit); - } - if (offset != null) { - baseQuery.offset(offset); - } - - return baseQuery.fetch(); - } - - private void addAccessStatusCountToTotal(DatasetVersion datasetVersion, Map totalCounts, DataFileAccessStatus dataFileAccessStatus) { - long fileMetadataCount = getFileMetadataCountByAccessStatus(datasetVersion, dataFileAccessStatus); - if (fileMetadataCount > 0) { - totalCounts.put(dataFileAccessStatus, fileMetadataCount); - } - } - - private long getFileMetadataCountByAccessStatus(DatasetVersion datasetVersion, DataFileAccessStatus accessStatus) { - JPAQueryFactory queryFactory = new JPAQueryFactory(em); - return queryFactory - .selectFrom(fileMetadata) - .where(fileMetadata.datasetVersion.id.eq(datasetVersion.getId()).and(createGetFileMetadatasAccessStatusExpression(accessStatus))) - .stream().count(); - } - - private JPAQuery createGetFileMetadatasBaseQuery(DatasetVersion datasetVersion, FileMetadatasOrderCriteria orderCriteria) { - JPAQueryFactory queryFactory = new JPAQueryFactory(em); - JPAQuery baseQuery = queryFactory.selectFrom(fileMetadata).where(fileMetadata.datasetVersion.id.eq(datasetVersion.getId())); - if (orderCriteria == FileMetadatasOrderCriteria.Newest || 
orderCriteria == FileMetadatasOrderCriteria.Oldest) { - baseQuery.from(dvObject).where(dvObject.id.eq(fileMetadata.dataFile.id)); - } - return baseQuery; - } - - private BooleanExpression createGetFileMetadatasAccessStatusExpression(DataFileAccessStatus accessStatus) { - QEmbargo embargo = fileMetadata.dataFile.embargo; - BooleanExpression activelyEmbargoedExpression = embargo.dateAvailable.goe(DateExpression.currentDate(LocalDate.class)); - BooleanExpression inactivelyEmbargoedExpression = embargo.isNull(); - BooleanExpression accessStatusExpression; - switch (accessStatus) { - case EmbargoedThenRestricted: - accessStatusExpression = activelyEmbargoedExpression.and(fileMetadata.dataFile.restricted.isTrue()); - break; - case EmbargoedThenPublic: - accessStatusExpression = activelyEmbargoedExpression.and(fileMetadata.dataFile.restricted.isFalse()); - break; - case Restricted: - accessStatusExpression = inactivelyEmbargoedExpression.and(fileMetadata.dataFile.restricted.isTrue()); - break; - case Public: - accessStatusExpression = inactivelyEmbargoedExpression.and(fileMetadata.dataFile.restricted.isFalse()); - break; - default: - throw new IllegalStateException("Unexpected value: " + accessStatus); - } - return accessStatusExpression; - } - - private void applyOrderCriteriaToGetFileMetadatasQuery(JPAQuery query, FileMetadatasOrderCriteria orderCriteria) { - DateTimeExpression orderByLifetimeExpression = new CaseBuilder().when(dvObject.publicationDate.isNotNull()).then(dvObject.publicationDate).otherwise(dvObject.createDate); - switch (orderCriteria) { - case NameZA: - query.orderBy(fileMetadata.label.desc()); - break; - case Newest: - query.orderBy(orderByLifetimeExpression.desc()); - break; - case Oldest: - query.orderBy(orderByLifetimeExpression.asc()); - break; - case Size: - query.orderBy(fileMetadata.dataFile.filesize.asc()); - break; - case Type: - query.orderBy(fileMetadata.dataFile.contentType.asc()); - break; - default: - 
query.orderBy(fileMetadata.label.asc()); - break; - } - } -} diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetVersionServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DatasetVersionServiceBean.java index 5c43001dcb5..6f087f9eabc 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetVersionServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetVersionServiceBean.java @@ -49,6 +49,22 @@ public class DatasetVersionServiceBean implements java.io.Serializable { private static final SimpleDateFormat logFormatter = new SimpleDateFormat("yyyy-MM-dd'T'HH-mm-ss"); + private static final String QUERY_STR_FIND_ALL_FILE_METADATAS_ORDER_BY_LABEL = "SELECT fm FROM FileMetadata fm" + + " WHERE fm.datasetVersion.id=:datasetVersionId" + + " ORDER BY fm.label"; + private static final String QUERY_STR_FIND_ALL_FILE_METADATAS_ORDER_BY_DATE = "SELECT fm FROM FileMetadata fm, DvObject dvo" + + " WHERE fm.datasetVersion.id = :datasetVersionId" + + " AND fm.dataFile.id = dvo.id" + + " ORDER BY CASE WHEN dvo.publicationDate IS NOT NULL THEN dvo.publicationDate ELSE dvo.createDate END"; + private static final String QUERY_STR_FIND_ALL_FILE_METADATAS_ORDER_BY_SIZE = "SELECT fm FROM FileMetadata fm, DataFile df" + + " WHERE fm.datasetVersion.id = :datasetVersionId" + + " AND fm.dataFile.id = df.id" + + " ORDER BY df.filesize"; + private static final String QUERY_STR_FIND_ALL_FILE_METADATAS_ORDER_BY_TYPE = "SELECT fm FROM FileMetadata fm, DataFile df" + + " WHERE fm.datasetVersion.id = :datasetVersionId" + + " AND fm.dataFile.id = df.id" + + " ORDER BY df.contentType"; + @EJB DatasetServiceBean datasetService; @@ -150,6 +166,18 @@ public DatasetVersion getDatasetVersion(){ } } // end RetrieveDatasetVersionResponse + /** + * Different criteria to sort the results of FileMetadata queries used in {@link DatasetVersionServiceBean#getFileMetadatas} + */ + public enum FileMetadatasOrderCriteria { + NameAZ, + NameZA, + Newest, + Oldest, + Size, + Type + } + public 
DatasetVersion find(Object pk) { return em.find(DatasetVersion.class, pk); } @@ -1224,4 +1252,50 @@ public List getUnarchivedDatasetVersions(){ return null; } } // end getUnarchivedDatasetVersions + + /** + * Returns a FileMetadata list of files in the specified DatasetVersion + * + * @param datasetVersion the DatasetVersion to access + * @param limit for pagination, can be null + * @param offset for pagination, can be null + * @param orderCriteria a FileMetadatasOrderCriteria to order the results + * @return a FileMetadata list of the specified DatasetVersion + */ + public List getFileMetadatas(DatasetVersion datasetVersion, Integer limit, Integer offset, FileMetadatasOrderCriteria orderCriteria) { + TypedQuery query = em.createQuery(getQueryStringFromFileMetadatasOrderCriteria(orderCriteria), FileMetadata.class) + .setParameter("datasetVersionId", datasetVersion.getId()); + if (limit != null) { + query.setMaxResults(limit); + } + if (offset != null) { + query.setFirstResult(offset); + } + return query.getResultList(); + } + + private String getQueryStringFromFileMetadatasOrderCriteria(FileMetadatasOrderCriteria orderCriteria) { + String queryString; + switch (orderCriteria) { + case NameZA: + queryString = QUERY_STR_FIND_ALL_FILE_METADATAS_ORDER_BY_LABEL + " DESC"; + break; + case Newest: + queryString = QUERY_STR_FIND_ALL_FILE_METADATAS_ORDER_BY_DATE + " DESC"; + break; + case Oldest: + queryString = QUERY_STR_FIND_ALL_FILE_METADATAS_ORDER_BY_DATE; + break; + case Size: + queryString = QUERY_STR_FIND_ALL_FILE_METADATAS_ORDER_BY_SIZE; + break; + case Type: + queryString = QUERY_STR_FIND_ALL_FILE_METADATAS_ORDER_BY_TYPE; + break; + default: + queryString = QUERY_STR_FIND_ALL_FILE_METADATAS_ORDER_BY_LABEL; + break; + } + return queryString; + } } // end class diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Access.java b/src/main/java/edu/harvard/iq/dataverse/api/Access.java index 3c226e68472..1aa3f4ffde6 100644 --- 
a/src/main/java/edu/harvard/iq/dataverse/api/Access.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Access.java @@ -1681,47 +1681,7 @@ public Response rejectFileAccess(@Context ContainerRequestContext crc, @PathPara return error(BAD_REQUEST, BundleUtil.getStringFromBundle("access.api.fileAccess.rejectFailure.noRequest", args)); } } - - @GET - @AuthRequired - @Path("/datafile/{id}/userFileAccessRequested") - public Response getUserFileAccessRequested(@Context ContainerRequestContext crc, @PathParam("id") String dataFileId) { - DataFile dataFile; - AuthenticatedUser requestAuthenticatedUser; - try { - dataFile = findDataFileOrDie(dataFileId); - requestAuthenticatedUser = getRequestAuthenticatedUserOrDie(crc); - } catch (WrappedResponse wr) { - return wr.getResponse(); - } - boolean fileAccessRequested = false; - List requests = dataFile.getFileAccessRequests(); - for (FileAccessRequest fileAccessRequest : requests) { - if (fileAccessRequest.getRequester().getId().equals(requestAuthenticatedUser.getId())) { - fileAccessRequested = true; - break; - } - } - return ok(fileAccessRequested); - } - - @GET - @AuthRequired - @Path("/datafile/{id}/userPermissions") - public Response getUserPermissionsOnFile(@Context ContainerRequestContext crc, @PathParam("id") String dataFileId) { - DataFile dataFile; - try { - dataFile = findDataFileOrDie(dataFileId); - } catch (WrappedResponse wr) { - return wr.getResponse(); - } - JsonObjectBuilder jsonObjectBuilder = Json.createObjectBuilder(); - User requestUser = getRequestUser(crc); - jsonObjectBuilder.add("canDownloadFile", fileDownloadService.canDownloadFile(createDataverseRequest(requestUser), dataFile)); - jsonObjectBuilder.add("canEditOwnerDataset", permissionService.userOn(requestUser, dataFile.getOwner()).has(Permission.EditDataset)); - return ok(jsonObjectBuilder); - } - + // checkAuthorization is a convenience method; it calls the boolean method // isAccessAuthorized(), the actual workhorse, tand throws a 403 exception 
if not. @@ -1989,4 +1949,21 @@ private URI handleCustomZipDownload(User user, String customZipServiceUrl, Strin } return redirectUri; } + + @GET + @AuthRequired + @Path("/datafile/{id}/userPermissions") + public Response getUserPermissionsOnFile(@Context ContainerRequestContext crc, @PathParam("id") String dataFileId) { + DataFile dataFile; + try { + dataFile = findDataFileOrDie(dataFileId); + } catch (WrappedResponse wr) { + return wr.getResponse(); + } + JsonObjectBuilder jsonObjectBuilder = Json.createObjectBuilder(); + User requestUser = getRequestUser(crc); + jsonObjectBuilder.add("canDownloadFile", fileDownloadService.canDownloadFile(createDataverseRequest(requestUser), dataFile)); + jsonObjectBuilder.add("canEditOwnerDataset", permissionService.userOn(requestUser, dataFile.getOwner()).has(Permission.EditDataset)); + return ok(jsonObjectBuilder); + } } diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java index b3be55399d8..704ec8f1989 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java @@ -236,9 +236,6 @@ public class Datasets extends AbstractApiBean { @Inject PrivateUrlServiceBean privateUrlService; - @Inject - DatasetVersionFilesServiceBean datasetVersionFilesServiceBean; - /** * Used to consolidate the way we parse and handle dataset versions. 
* @param @@ -487,56 +484,23 @@ public Response getVersion(@Context ContainerRequestContext crc, @PathParam("id" : ok(json(dsv)); }, getRequestUser(crc)); } - + @GET @AuthRequired @Path("{id}/versions/{versionId}/files") - public Response getVersionFiles(@Context ContainerRequestContext crc, - @PathParam("id") String datasetId, - @PathParam("versionId") String versionId, - @QueryParam("limit") Integer limit, - @QueryParam("offset") Integer offset, - @QueryParam("contentType") String contentType, - @QueryParam("accessStatus") String accessStatus, - @QueryParam("categoryName") String categoryName, - @QueryParam("tabularTagName") String tabularTagName, - @QueryParam("searchText") String searchText, - @QueryParam("orderCriteria") String orderCriteria, - @Context UriInfo uriInfo, - @Context HttpHeaders headers) { - return response(req -> { + public Response getVersionFiles(@Context ContainerRequestContext crc, @PathParam("id") String datasetId, @PathParam("versionId") String versionId, @QueryParam("limit") Integer limit, @QueryParam("offset") Integer offset, @QueryParam("orderCriteria") String orderCriteria, @Context UriInfo uriInfo, @Context HttpHeaders headers) { + return response( req -> { DatasetVersion datasetVersion = getDatasetVersionOrDie(req, versionId, findDatasetOrDie(datasetId), uriInfo, headers); - DatasetVersionFilesServiceBean.FileMetadatasOrderCriteria fileMetadatasOrderCriteria; + DatasetVersionServiceBean.FileMetadatasOrderCriteria fileMetadatasOrderCriteria; try { - fileMetadatasOrderCriteria = orderCriteria != null ? DatasetVersionFilesServiceBean.FileMetadatasOrderCriteria.valueOf(orderCriteria) : DatasetVersionFilesServiceBean.FileMetadatasOrderCriteria.NameAZ; + fileMetadatasOrderCriteria = orderCriteria != null ? 
DatasetVersionServiceBean.FileMetadatasOrderCriteria.valueOf(orderCriteria) : DatasetVersionServiceBean.FileMetadatasOrderCriteria.NameAZ; } catch (IllegalArgumentException e) { return error(Response.Status.BAD_REQUEST, "Invalid order criteria: " + orderCriteria); } - DatasetVersionFilesServiceBean.DataFileAccessStatus dataFileAccessStatus; - try { - dataFileAccessStatus = accessStatus != null ? DatasetVersionFilesServiceBean.DataFileAccessStatus.valueOf(accessStatus) : null; - } catch (IllegalArgumentException e) { - return error(Response.Status.BAD_REQUEST, "Invalid access status: " + accessStatus); - } - return ok(jsonFileMetadatas(datasetVersionFilesServiceBean.getFileMetadatas(datasetVersion, limit, offset, contentType, dataFileAccessStatus, categoryName, tabularTagName, searchText, fileMetadatasOrderCriteria))); + return ok(jsonFileMetadatas(datasetversionService.getFileMetadatas(datasetVersion, limit, offset, fileMetadatasOrderCriteria))); }, getRequestUser(crc)); } - - @GET - @AuthRequired - @Path("{id}/versions/{versionId}/files/counts") - public Response getVersionFileCounts(@Context ContainerRequestContext crc, @PathParam("id") String datasetId, @PathParam("versionId") String versionId, @Context UriInfo uriInfo, @Context HttpHeaders headers) { - return response(req -> { - DatasetVersion datasetVersion = getDatasetVersionOrDie(req, versionId, findDatasetOrDie(datasetId), uriInfo, headers); - JsonObjectBuilder jsonObjectBuilder = Json.createObjectBuilder(); - jsonObjectBuilder.add("total", datasetVersionFilesServiceBean.getFileMetadataCount(datasetVersion)); - jsonObjectBuilder.add("perContentType", json(datasetVersionFilesServiceBean.getFileMetadataCountPerContentType(datasetVersion))); - jsonObjectBuilder.add("perCategoryName", json(datasetVersionFilesServiceBean.getFileMetadataCountPerCategoryName(datasetVersion))); - jsonObjectBuilder.add("perAccessStatus", 
jsonFileCountPerAccessStatusMap(datasetVersionFilesServiceBean.getFileMetadataCountPerAccessStatus(datasetVersion))); - return ok(jsonObjectBuilder); - }, getRequestUser(crc)); - } - + @GET @AuthRequired @Path("{id}/dirindex") diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Files.java b/src/main/java/edu/harvard/iq/dataverse/api/Files.java index 8a9abe68d85..82811162d52 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Files.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Files.java @@ -4,7 +4,6 @@ import com.google.gson.JsonObject; import edu.harvard.iq.dataverse.DataFile; import edu.harvard.iq.dataverse.DataFileServiceBean; -import edu.harvard.iq.dataverse.DataFileTag; import edu.harvard.iq.dataverse.Dataset; import edu.harvard.iq.dataverse.DatasetLock; import edu.harvard.iq.dataverse.DatasetServiceBean; @@ -55,7 +54,6 @@ import java.io.IOException; import java.io.InputStream; -import java.io.StringReader; import java.util.ArrayList; import java.util.Arrays; import java.util.List; @@ -65,12 +63,15 @@ import jakarta.ejb.EJBException; import jakarta.inject.Inject; import jakarta.json.Json; -import jakarta.json.JsonArray; -import jakarta.json.JsonString; -import jakarta.json.JsonValue; -import jakarta.json.stream.JsonParsingException; import jakarta.servlet.http.HttpServletResponse; -import jakarta.ws.rs.*; +import jakarta.ws.rs.Consumes; +import jakarta.ws.rs.DELETE; +import jakarta.ws.rs.GET; +import jakarta.ws.rs.POST; +import jakarta.ws.rs.PUT; +import jakarta.ws.rs.Path; +import jakarta.ws.rs.PathParam; +import jakarta.ws.rs.QueryParam; import jakarta.ws.rs.container.ContainerRequestContext; import jakarta.ws.rs.core.Context; import jakarta.ws.rs.core.HttpHeaders; @@ -111,8 +112,6 @@ public class Files extends AbstractApiBean { MakeDataCountLoggingServiceBean mdcLogService; @Inject GuestbookResponseServiceBean guestbookResponseService; - @Inject - DataFileServiceBean dataFileServiceBean; private static final Logger logger = 
Logger.getLogger(Files.class.getName()); @@ -853,85 +852,18 @@ public Response getFileDataTables(@Context ContainerRequestContext crc, @PathPar try { dataFile = findDataFileOrDie(dataFileId); } catch (WrappedResponse e) { - return notFound("File not found for given id."); + return error(Response.Status.NOT_FOUND, "File not found for given id."); } if (dataFile.isRestricted() || FileUtil.isActivelyEmbargoed(dataFile)) { DataverseRequest dataverseRequest = createDataverseRequest(getRequestUser(crc)); boolean hasPermissionToDownloadFile = permissionSvc.requestOn(dataverseRequest, dataFile).has(Permission.DownloadFile); if (!hasPermissionToDownloadFile) { - return forbidden("Insufficient permissions to access the requested information."); + return error(FORBIDDEN, "Insufficient permissions to access the requested information."); } } if (!dataFile.isTabularData()) { - return badRequest(BundleUtil.getStringFromBundle("files.api.only.tabular.supported")); + return error(BAD_REQUEST, "This operation is only available for tabular files."); } return ok(jsonDT(dataFile.getDataTables())); } - - @POST - @AuthRequired - @Path("{id}/metadata/categories") - @Produces(MediaType.APPLICATION_JSON) - public Response setFileCategories(@Context ContainerRequestContext crc, @PathParam("id") String dataFileId, String jsonBody) { - return response(req -> { - DataFile dataFile = execCommand(new GetDataFileCommand(req, findDataFileOrDie(dataFileId))); - jakarta.json.JsonObject jsonObject; - try (StringReader stringReader = new StringReader(jsonBody)) { - jsonObject = Json.createReader(stringReader).readObject(); - JsonArray requestedCategoriesJson = jsonObject.getJsonArray("categories"); - FileMetadata fileMetadata = dataFile.getFileMetadata(); - for (JsonValue jsonValue : requestedCategoriesJson) { - JsonString jsonString = (JsonString) jsonValue; - fileMetadata.addCategoryByName(jsonString.getString()); - } - execCommand(new 
UpdateDatasetVersionCommand(fileMetadata.getDataFile().getOwner(), req)); - return ok("Categories of file " + dataFileId + " updated."); - } catch (JsonParsingException jpe) { - return badRequest("Error parsing Json: " + jpe.getMessage()); - } - }, getRequestUser(crc)); - } - - @POST - @AuthRequired - @Path("{id}/metadata/tabularTags") - @Produces(MediaType.APPLICATION_JSON) - public Response setFileTabularTags(@Context ContainerRequestContext crc, @PathParam("id") String dataFileId, String jsonBody) { - return response(req -> { - DataFile dataFile = execCommand(new GetDataFileCommand(req, findDataFileOrDie(dataFileId))); - if (!dataFile.isTabularData()) { - return badRequest(BundleUtil.getStringFromBundle("files.api.only.tabular.supported")); - } - jakarta.json.JsonObject jsonObject; - try (StringReader stringReader = new StringReader(jsonBody)) { - jsonObject = Json.createReader(stringReader).readObject(); - JsonArray requestedTabularTagsJson = jsonObject.getJsonArray("tabularTags"); - for (JsonValue jsonValue : requestedTabularTagsJson) { - JsonString jsonString = (JsonString) jsonValue; - DataFileTag tag = new DataFileTag(); - try { - tag.setTypeByLabel(jsonString.getString()); - } catch (IllegalArgumentException iax) { - return badRequest(iax.getMessage()); - } - tag.setDataFile(dataFile); - dataFile.addTag(tag); - } - execCommand(new UpdateDatasetVersionCommand(dataFile.getOwner(), req)); - return ok("Tabular tags of file " + dataFileId + " updated."); - } catch (JsonParsingException jpe) { - return badRequest("Error parsing Json: " + jpe.getMessage()); - } - }, getRequestUser(crc)); - } - - @GET - @AuthRequired - @Path("{id}/hasBeenDeleted") - public Response getHasBeenDeleted(@Context ContainerRequestContext crc, @PathParam("id") String dataFileId) { - return response(req -> { - DataFile dataFile = execCommand(new GetDataFileCommand(req, findDataFileOrDie(dataFileId))); - return ok(dataFileServiceBean.hasBeenDeleted(dataFile)); - }, getRequestUser(crc)); - 
} } diff --git a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java index 1fed0b233e4..a7aa36f179e 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java @@ -672,14 +672,9 @@ public static JsonObjectBuilder json(DataFile df, FileMetadata fileMetadata, boo //--------------------------------------------- .add("md5", getMd5IfItExists(df.getChecksumType(), df.getChecksumValue())) .add("checksum", getChecksumTypeAndValue(df.getChecksumType(), df.getChecksumValue())) - .add("tabularData", df.isTabularData()) .add("tabularTags", getTabularFileTags(df)) .add("creationDate", df.getCreateDateFormattedYYYYMMDD()) .add("publicationDate", df.getPublicationDateFormattedYYYYMMDD()); - Dataset dfOwner = df.getOwner(); - if (dfOwner != null) { - builder.add("fileAccessRequest", dfOwner.isFileAccessRequest()); - } /* * The restricted state was not included prior to #9175 so to avoid backward * incompatability, it is now only added when generating json for the @@ -1100,22 +1095,6 @@ public Set characteristics() { }; } - public static JsonObjectBuilder json(Map map) { - JsonObjectBuilder jsonObjectBuilder = Json.createObjectBuilder(); - for (Map.Entry mapEntry : map.entrySet()) { - jsonObjectBuilder.add(mapEntry.getKey(), mapEntry.getValue()); - } - return jsonObjectBuilder; - } - - public static JsonObjectBuilder jsonFileCountPerAccessStatusMap(Map map) { - JsonObjectBuilder jsonObjectBuilder = Json.createObjectBuilder(); - for (Map.Entry mapEntry : map.entrySet()) { - jsonObjectBuilder.add(mapEntry.getKey().toString(), mapEntry.getValue()); - } - return jsonObjectBuilder; - } - public static Collector, JsonArrayBuilder> toJsonArray() { return new Collector, JsonArrayBuilder>() { diff --git a/src/main/java/propertyFiles/Bundle.properties b/src/main/java/propertyFiles/Bundle.properties index 
7b4befcca36..ac725caf1b2 100644 --- a/src/main/java/propertyFiles/Bundle.properties +++ b/src/main/java/propertyFiles/Bundle.properties @@ -2620,7 +2620,6 @@ admin.api.deleteUser.success=Authenticated User {0} deleted. #Files.java files.api.metadata.update.duplicateFile=Filename already exists at {0} files.api.no.draft=No draft available for this file -files.api.only.tabular.supported=This operation is only available for tabular files. #Datasets.java datasets.api.updatePIDMetadata.failure.dataset.must.be.released=Modify Registration Metadata must be run on a published dataset. diff --git a/src/test/java/edu/harvard/iq/dataverse/api/AccessIT.java b/src/test/java/edu/harvard/iq/dataverse/api/AccessIT.java index 416caa68566..b1beddd893f 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/AccessIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/AccessIT.java @@ -26,9 +26,11 @@ import static jakarta.ws.rs.core.Response.Status.*; import static org.hamcrest.MatcherAssert.*; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertTrue; import static org.hamcrest.CoreMatchers.is; import static org.hamcrest.CoreMatchers.not; -import static org.junit.jupiter.api.Assertions.*; /** * @@ -630,37 +632,28 @@ public void testZipUploadAndDownload() throws IOException { } @Test - public void testGetUserFileAccessRequested() { - // Create new user - Response createUserResponse = UtilIT.createRandomUser(); - createUserResponse.then().assertThat().statusCode(OK.getStatusCode()); - String newUserApiToken = UtilIT.getApiTokenFromResponse(createUserResponse); - - String dataFileId = Integer.toString(tabFile3IdRestricted); - - // Call with new user and unrequested access file - Response getUserFileAccessRequestedResponse = UtilIT.getUserFileAccessRequested(dataFileId, newUserApiToken); - 
getUserFileAccessRequestedResponse.then().assertThat().statusCode(OK.getStatusCode()); + public void testGetUserPermissionsOnFile() { + Response createUser = UtilIT.createRandomUser(); + createUser.then().assertThat().statusCode(OK.getStatusCode()); + String apiToken = UtilIT.getApiTokenFromResponse(createUser); - boolean userFileAccessRequested = JsonPath.from(getUserFileAccessRequestedResponse.body().asString()).getBoolean("data"); - assertFalse(userFileAccessRequested); + Response createDataverseResponse = UtilIT.createRandomDataverse(apiToken); + createDataverseResponse.then().assertThat().statusCode(CREATED.getStatusCode()); + String dataverseAlias = UtilIT.getAliasFromResponse(createDataverseResponse); - // Request file access for the new user - Response requestFileAccessResponse = UtilIT.requestFileAccess(dataFileId, newUserApiToken); - requestFileAccessResponse.then().assertThat().statusCode(OK.getStatusCode()); + Response createDatasetResponse = UtilIT.createRandomDatasetViaNativeApi(dataverseAlias, apiToken); + createDatasetResponse.then().assertThat().statusCode(CREATED.getStatusCode()); + int datasetId = JsonPath.from(createDatasetResponse.body().asString()).getInt("data.id"); - // Call with new user and requested access file - getUserFileAccessRequestedResponse = UtilIT.getUserFileAccessRequested(dataFileId, newUserApiToken); - getUserFileAccessRequestedResponse.then().assertThat().statusCode(OK.getStatusCode()); + // Upload test file + String pathToTestFile = "src/test/resources/images/coffeeshop.png"; + Response uploadResponse = UtilIT.uploadFileViaNative(Integer.toString(datasetId), pathToTestFile, Json.createObjectBuilder().build(), apiToken); + uploadResponse.then().assertThat().statusCode(OK.getStatusCode()); - userFileAccessRequested = JsonPath.from(getUserFileAccessRequestedResponse.body().asString()).getBoolean("data"); - assertTrue(userFileAccessRequested); - } + // Assert user permissions on file + int testFileId = 
JsonPath.from(uploadResponse.body().asString()).getInt("data.files[0].dataFile.id"); + Response getUserPermissionsOnFileResponse = UtilIT.getUserPermissionsOnFile(Integer.toString(testFileId), apiToken); - @Test - public void testGetUserPermissionsOnFile() { - // Call with valid file id - Response getUserPermissionsOnFileResponse = UtilIT.getUserPermissionsOnFile(Integer.toString(basicFileId), apiToken); getUserPermissionsOnFileResponse.then().assertThat().statusCode(OK.getStatusCode()); boolean canDownloadFile = JsonPath.from(getUserPermissionsOnFileResponse.body().asString()).getBoolean("data.canDownloadFile"); assertTrue(canDownloadFile); diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java index b9f09cc7c07..3b6d4d1ecdf 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java @@ -1,6 +1,6 @@ package edu.harvard.iq.dataverse.api; -import edu.harvard.iq.dataverse.DatasetVersionFilesServiceBean; +import edu.harvard.iq.dataverse.DatasetVersionServiceBean; import io.restassured.RestAssured; import static io.restassured.RestAssured.given; @@ -9,9 +9,6 @@ import io.restassured.http.ContentType; import io.restassured.response.Response; -import java.time.LocalDate; -import java.time.format.DateTimeFormatter; -import java.util.*; import java.util.logging.Logger; import org.junit.jupiter.api.AfterAll; @@ -20,6 +17,8 @@ import org.skyscreamer.jsonassert.JSONAssert; import org.junit.jupiter.api.Disabled; +import java.util.List; +import java.util.Map; import jakarta.json.JsonObject; import static jakarta.ws.rs.core.Response.Status.CREATED; @@ -40,6 +39,8 @@ import edu.harvard.iq.dataverse.authorization.users.PrivateUrlUser; import edu.harvard.iq.dataverse.settings.SettingsServiceBean; +import java.util.UUID; + import org.apache.commons.lang3.StringUtils; import org.apache.commons.lang3.exception.ExceptionUtils; @@ 
-66,7 +67,8 @@ import java.nio.file.Path; import java.nio.file.Paths; import java.nio.file.Files; - +import java.util.ArrayList; +import java.util.HashMap; import jakarta.json.Json; import jakarta.json.JsonArray; import jakarta.json.JsonObjectBuilder; @@ -75,7 +77,6 @@ import javax.xml.stream.XMLStreamException; import javax.xml.stream.XMLStreamReader; -import static java.lang.Thread.sleep; import static org.junit.jupiter.api.Assertions.assertEquals; import org.hamcrest.CoreMatchers; @@ -116,9 +117,7 @@ public static void setUpClass() { Response removeAnonymizedFieldTypeNames = UtilIT.deleteSetting(SettingsServiceBean.Key.AnonymizedFieldTypeNames); removeAnonymizedFieldTypeNames.then().assertThat() .statusCode(200); - - UtilIT.deleteSetting(SettingsServiceBean.Key.MaxEmbargoDurationInMonths); - + /* With Dual mode, we can no longer mess with upload methods since native is now required for anything to work Response removeDcmUrl = UtilIT.deleteSetting(SettingsServiceBean.Key.DataCaptureModuleUrl); @@ -145,9 +144,7 @@ public static void afterClass() { Response removeAnonymizedFieldTypeNames = UtilIT.deleteSetting(SettingsServiceBean.Key.AnonymizedFieldTypeNames); removeAnonymizedFieldTypeNames.then().assertThat() .statusCode(200); - - UtilIT.deleteSetting(SettingsServiceBean.Key.MaxEmbargoDurationInMonths); - + /* See above Response removeDcmUrl = UtilIT.deleteSetting(SettingsServiceBean.Key.DataCaptureModuleUrl); removeDcmUrl.then().assertThat() @@ -3267,7 +3264,8 @@ public void getDatasetVersionCitation() { .body("data.message", containsString("DRAFT VERSION")); } - public void getVersionFiles() throws IOException, InterruptedException { + @Test + public void getVersionFiles() throws IOException { Response createUser = UtilIT.createRandomUser(); createUser.then().assertThat().statusCode(OK.getStatusCode()); String apiToken = UtilIT.getApiTokenFromResponse(createUser); @@ -3299,42 +3297,39 @@ public void getVersionFiles() throws IOException, InterruptedException { 
int testPageSize = 2; // Test page 1 - Response getVersionFilesResponsePaginated = UtilIT.getVersionFiles(datasetId, testDatasetVersion, testPageSize, null, null, null, null, null, null, null, apiToken); + Response getVersionFilesResponsePaginated = UtilIT.getVersionFiles(datasetId, testDatasetVersion, testPageSize, null, null, apiToken); + + int fileMetadatasCount = getVersionFilesResponsePaginated.jsonPath().getList("data").size(); + assertEquals(testPageSize, fileMetadatasCount); getVersionFilesResponsePaginated.then().assertThat() .statusCode(OK.getStatusCode()) .body("data[0].label", equalTo(testFileName1)) .body("data[1].label", equalTo(testFileName2)); - int fileMetadatasCount = getVersionFilesResponsePaginated.jsonPath().getList("data").size(); - assertEquals(testPageSize, fileMetadatasCount); - - String testFileId1 = JsonPath.from(getVersionFilesResponsePaginated.body().asString()).getString("data[0].dataFile.id"); - String testFileId2 = JsonPath.from(getVersionFilesResponsePaginated.body().asString()).getString("data[1].dataFile.id"); - // Test page 2 - getVersionFilesResponsePaginated = UtilIT.getVersionFiles(datasetId, testDatasetVersion, testPageSize, testPageSize, null, null, null, null, null, null, apiToken); + getVersionFilesResponsePaginated = UtilIT.getVersionFiles(datasetId, testDatasetVersion, testPageSize, testPageSize, null, apiToken); + + fileMetadatasCount = getVersionFilesResponsePaginated.jsonPath().getList("data").size(); + assertEquals(testPageSize, fileMetadatasCount); getVersionFilesResponsePaginated.then().assertThat() .statusCode(OK.getStatusCode()) .body("data[0].label", equalTo(testFileName3)) .body("data[1].label", equalTo(testFileName4)); - fileMetadatasCount = getVersionFilesResponsePaginated.jsonPath().getList("data").size(); - assertEquals(testPageSize, fileMetadatasCount); - // Test page 3 (last) - getVersionFilesResponsePaginated = UtilIT.getVersionFiles(datasetId, testDatasetVersion, testPageSize, testPageSize * 2, null, 
null, null, null, null, null, apiToken); + getVersionFilesResponsePaginated = UtilIT.getVersionFiles(datasetId, testDatasetVersion, testPageSize, testPageSize * 2, null, apiToken); + + fileMetadatasCount = getVersionFilesResponsePaginated.jsonPath().getList("data").size(); + assertEquals(1, fileMetadatasCount); getVersionFilesResponsePaginated.then().assertThat() .statusCode(OK.getStatusCode()) .body("data[0].label", equalTo(testFileName5)); - fileMetadatasCount = getVersionFilesResponsePaginated.jsonPath().getList("data").size(); - assertEquals(1, fileMetadatasCount); - // Test NameZA order criteria - Response getVersionFilesResponseNameZACriteria = UtilIT.getVersionFiles(datasetId, testDatasetVersion, null, null, null, null, null, null, null, DatasetVersionFilesServiceBean.FileMetadatasOrderCriteria.NameZA.toString(), apiToken); + Response getVersionFilesResponseNameZACriteria = UtilIT.getVersionFiles(datasetId, testDatasetVersion, null, null, DatasetVersionServiceBean.FileMetadatasOrderCriteria.NameZA.toString(), apiToken); getVersionFilesResponseNameZACriteria.then().assertThat() .statusCode(OK.getStatusCode()) @@ -3345,7 +3340,7 @@ public void getVersionFiles() throws IOException, InterruptedException { .body("data[4].label", equalTo(testFileName1)); // Test Newest order criteria - Response getVersionFilesResponseNewestCriteria = UtilIT.getVersionFiles(datasetId, testDatasetVersion, null, null, null, null, null, null, null, DatasetVersionFilesServiceBean.FileMetadatasOrderCriteria.Newest.toString(), apiToken); + Response getVersionFilesResponseNewestCriteria = UtilIT.getVersionFiles(datasetId, testDatasetVersion, null, null, DatasetVersionServiceBean.FileMetadatasOrderCriteria.Newest.toString(), apiToken); getVersionFilesResponseNewestCriteria.then().assertThat() .statusCode(OK.getStatusCode()) @@ -3356,7 +3351,7 @@ public void getVersionFiles() throws IOException, InterruptedException { .body("data[4].label", equalTo(testFileName1)); // Test Oldest order 
criteria - Response getVersionFilesResponseOldestCriteria = UtilIT.getVersionFiles(datasetId, testDatasetVersion, null, null, null, null, null, null, null, DatasetVersionFilesServiceBean.FileMetadatasOrderCriteria.Oldest.toString(), apiToken); + Response getVersionFilesResponseOldestCriteria = UtilIT.getVersionFiles(datasetId, testDatasetVersion, null, null, DatasetVersionServiceBean.FileMetadatasOrderCriteria.Oldest.toString(), apiToken); getVersionFilesResponseOldestCriteria.then().assertThat() .statusCode(OK.getStatusCode()) @@ -3367,7 +3362,7 @@ public void getVersionFiles() throws IOException, InterruptedException { .body("data[4].label", equalTo(testFileName4)); // Test Size order criteria - Response getVersionFilesResponseSizeCriteria = UtilIT.getVersionFiles(datasetId, testDatasetVersion, null, null, null, null, null, null, null, DatasetVersionFilesServiceBean.FileMetadatasOrderCriteria.Size.toString(), apiToken); + Response getVersionFilesResponseSizeCriteria = UtilIT.getVersionFiles(datasetId, testDatasetVersion, null, null, DatasetVersionServiceBean.FileMetadatasOrderCriteria.Size.toString(), apiToken); getVersionFilesResponseSizeCriteria.then().assertThat() .statusCode(OK.getStatusCode()) @@ -3378,7 +3373,7 @@ public void getVersionFiles() throws IOException, InterruptedException { .body("data[4].label", equalTo(testFileName4)); // Test Type order criteria - Response getVersionFilesResponseTypeCriteria = UtilIT.getVersionFiles(datasetId, testDatasetVersion, null, null, null, null, null, null, null, DatasetVersionFilesServiceBean.FileMetadatasOrderCriteria.Type.toString(), apiToken); + Response getVersionFilesResponseTypeCriteria = UtilIT.getVersionFiles(datasetId, testDatasetVersion, null, null, DatasetVersionServiceBean.FileMetadatasOrderCriteria.Type.toString(), apiToken); getVersionFilesResponseTypeCriteria.then().assertThat() .statusCode(OK.getStatusCode()) @@ -3390,186 +3385,9 @@ public void getVersionFiles() throws IOException, 
InterruptedException { // Test invalid order criteria String invalidOrderCriteria = "invalidOrderCriteria"; - Response getVersionFilesResponseInvalidOrderCriteria = UtilIT.getVersionFiles(datasetId, testDatasetVersion, null, null, null, null, null, null, null, invalidOrderCriteria, apiToken); + Response getVersionFilesResponseInvalidOrderCriteria = UtilIT.getVersionFiles(datasetId, testDatasetVersion, null, null, invalidOrderCriteria, apiToken); getVersionFilesResponseInvalidOrderCriteria.then().assertThat() .statusCode(BAD_REQUEST.getStatusCode()) .body("message", equalTo("Invalid order criteria: " + invalidOrderCriteria)); - - // Test Content Type - Response getVersionFilesResponseContentType = UtilIT.getVersionFiles(datasetId, testDatasetVersion, null, null, "image/png", null, null, null, null, null, apiToken); - - getVersionFilesResponseContentType.then().assertThat() - .statusCode(OK.getStatusCode()) - .body("data[0].label", equalTo(testFileName5)); - - fileMetadatasCount = getVersionFilesResponseContentType.jsonPath().getList("data").size(); - assertEquals(1, fileMetadatasCount); - - // Test Category Name - String testCategory = "testCategory"; - Response setFileCategoriesResponse = UtilIT.setFileCategories(testFileId1, apiToken, List.of(testCategory)); - setFileCategoriesResponse.then().assertThat().statusCode(OK.getStatusCode()); - setFileCategoriesResponse = UtilIT.setFileCategories(testFileId2, apiToken, List.of(testCategory)); - setFileCategoriesResponse.then().assertThat().statusCode(OK.getStatusCode()); - - Response getVersionFilesResponseCategoryName = UtilIT.getVersionFiles(datasetId, testDatasetVersion, null, null, null, null, testCategory, null, null, null, apiToken); - - getVersionFilesResponseCategoryName.then().assertThat() - .statusCode(OK.getStatusCode()) - .body("data[0].label", equalTo(testFileName1)) - .body("data[1].label", equalTo(testFileName2)); - - fileMetadatasCount = 
getVersionFilesResponseCategoryName.jsonPath().getList("data").size(); - assertEquals(2, fileMetadatasCount); - - // Test Access Status Restricted - Response restrictFileResponse = UtilIT.restrictFile(String.valueOf(testFileId1), true, apiToken); - restrictFileResponse.then().assertThat() - .statusCode(OK.getStatusCode()); - - Response getVersionFilesResponseRestricted = UtilIT.getVersionFiles(datasetId, testDatasetVersion, null, null, null, DatasetVersionFilesServiceBean.DataFileAccessStatus.Restricted.toString(), null, null, null, null, apiToken); - - getVersionFilesResponseRestricted.then().assertThat() - .statusCode(OK.getStatusCode()) - .body("data[0].label", equalTo(testFileName1)); - - fileMetadatasCount = getVersionFilesResponseRestricted.jsonPath().getList("data").size(); - assertEquals(1, fileMetadatasCount); - - // Test Access Status Embargoed - UtilIT.setSetting(SettingsServiceBean.Key.MaxEmbargoDurationInMonths, "12"); - String activeEmbargoDate = LocalDate.now().plusMonths(6).format(DateTimeFormatter.ofPattern("yyyy-MM-dd")); - - // Create embargo for test file 1 (Embargoed and Restricted) - Response createActiveFileEmbargoResponse = UtilIT.createFileEmbargo(datasetId, Integer.parseInt(testFileId1), activeEmbargoDate, apiToken); - - createActiveFileEmbargoResponse.then().assertThat() - .statusCode(OK.getStatusCode()); - - // Create embargo for test file 2 (Embargoed and Public) - createActiveFileEmbargoResponse = UtilIT.createFileEmbargo(datasetId, Integer.parseInt(testFileId2), activeEmbargoDate, apiToken); - - createActiveFileEmbargoResponse.then().assertThat() - .statusCode(OK.getStatusCode()); - - Response getVersionFilesResponseEmbargoedThenPublic = UtilIT.getVersionFiles(datasetId, testDatasetVersion, null, null, null, DatasetVersionFilesServiceBean.DataFileAccessStatus.EmbargoedThenPublic.toString(), null, null, null, null, apiToken); - - getVersionFilesResponseEmbargoedThenPublic.then().assertThat() - .statusCode(OK.getStatusCode()) - 
.body("data[0].label", equalTo(testFileName2)); - - fileMetadatasCount = getVersionFilesResponseEmbargoedThenPublic.jsonPath().getList("data").size(); - assertEquals(1, fileMetadatasCount); - - Response getVersionFilesResponseEmbargoedThenRestricted = UtilIT.getVersionFiles(datasetId, testDatasetVersion, null, null, null, DatasetVersionFilesServiceBean.DataFileAccessStatus.EmbargoedThenRestricted.toString(), null, null, null, null, apiToken); - - getVersionFilesResponseEmbargoedThenRestricted.then().assertThat() - .statusCode(OK.getStatusCode()) - .body("data[0].label", equalTo(testFileName1)); - - fileMetadatasCount = getVersionFilesResponseEmbargoedThenRestricted.jsonPath().getList("data").size(); - assertEquals(1, fileMetadatasCount); - - // Test Access Status Public - Response getVersionFilesResponsePublic = UtilIT.getVersionFiles(datasetId, testDatasetVersion, null, null, null, DatasetVersionFilesServiceBean.DataFileAccessStatus.Public.toString(), null, null, null, null, apiToken); - - getVersionFilesResponsePublic.then().assertThat() - .statusCode(OK.getStatusCode()) - .body("data[0].label", equalTo(testFileName3)) - .body("data[1].label", equalTo(testFileName4)) - .body("data[2].label", equalTo(testFileName5)); - - fileMetadatasCount = getVersionFilesResponsePublic.jsonPath().getList("data").size(); - assertEquals(3, fileMetadatasCount); - - // Test Search Text - Response getVersionFilesResponseSearchText = UtilIT.getVersionFiles(datasetId, testDatasetVersion, null, null, null, null, null, null, "test_1", null, apiToken); - - getVersionFilesResponseSearchText.then().assertThat() - .statusCode(OK.getStatusCode()) - .body("data[0].label", equalTo(testFileName1)); - - fileMetadatasCount = getVersionFilesResponseSearchText.jsonPath().getList("data").size(); - assertEquals(1, fileMetadatasCount); - - // Test Tabular Tag Name - String pathToTabularTestFile = "src/test/resources/tab/test.tab"; - Response uploadTabularFileResponse = 
UtilIT.uploadFileViaNative(Integer.toString(datasetId), pathToTabularTestFile, Json.createObjectBuilder().build(), apiToken); - uploadTabularFileResponse.then().assertThat().statusCode(OK.getStatusCode()); - - String tabularFileId = uploadTabularFileResponse.getBody().jsonPath().getString("data.files[0].dataFile.id"); - - // Ensure tabular file is ingested - sleep(2000); - - String tabularTagName = "Survey"; - Response setFileTabularTagsResponse = UtilIT.setFileTabularTags(tabularFileId, apiToken, List.of(tabularTagName)); - setFileTabularTagsResponse.then().assertThat().statusCode(OK.getStatusCode()); - - Response getVersionFilesResponseTabularTagName = UtilIT.getVersionFiles(datasetId, testDatasetVersion, null, null, null, null, null, tabularTagName, null, null, apiToken); - - getVersionFilesResponseTabularTagName.then().assertThat() - .statusCode(OK.getStatusCode()) - .body("data[0].label", equalTo("test.tab")); - - fileMetadatasCount = getVersionFilesResponseTabularTagName.jsonPath().getList("data").size(); - assertEquals(1, fileMetadatasCount); - } - - @Test - public void getVersionFileCounts() throws IOException { - Response createUser = UtilIT.createRandomUser(); - createUser.then().assertThat().statusCode(OK.getStatusCode()); - String apiToken = UtilIT.getApiTokenFromResponse(createUser); - - Response createDataverseResponse = UtilIT.createRandomDataverse(apiToken); - createDataverseResponse.then().assertThat().statusCode(CREATED.getStatusCode()); - String dataverseAlias = UtilIT.getAliasFromResponse(createDataverseResponse); - - Response createDatasetResponse = UtilIT.createRandomDatasetViaNativeApi(dataverseAlias, apiToken); - createDatasetResponse.then().assertThat().statusCode(CREATED.getStatusCode()); - String datasetPersistentId = JsonPath.from(createDatasetResponse.body().asString()).getString("data.persistentId"); - int datasetId = JsonPath.from(createDatasetResponse.body().asString()).getInt("data.id"); - - // Creating test files - String 
testFileName1 = "test_1.txt"; - String testFileName2 = "test_2.txt"; - String testFileName3 = "test_3.png"; - - UtilIT.createAndUploadTestFile(datasetPersistentId, testFileName1, new byte[50], apiToken); - UtilIT.createAndUploadTestFile(datasetPersistentId, testFileName2, new byte[200], apiToken); - UtilIT.createAndUploadTestFile(datasetPersistentId, testFileName3, new byte[100], apiToken); - - // Creating a categorized test file - String pathToTestFile = "src/test/resources/images/coffeeshop.png"; - Response uploadResponse = UtilIT.uploadFileViaNative(Integer.toString(datasetId), pathToTestFile, Json.createObjectBuilder().build(), apiToken); - uploadResponse.then().assertThat().statusCode(OK.getStatusCode()); - String dataFileId = uploadResponse.getBody().jsonPath().getString("data.files[0].dataFile.id"); - String testCategory = "testCategory"; - Response setFileCategoriesResponse = UtilIT.setFileCategories(dataFileId, apiToken, List.of(testCategory)); - setFileCategoriesResponse.then().assertThat().statusCode(OK.getStatusCode()); - - // Setting embargo for file (Embargo and Public) - UtilIT.setSetting(SettingsServiceBean.Key.MaxEmbargoDurationInMonths, "12"); - String activeEmbargoDate = LocalDate.now().plusMonths(6).format(DateTimeFormatter.ofPattern("yyyy-MM-dd")); - Response createFileEmbargoResponse = UtilIT.createFileEmbargo(datasetId, Integer.parseInt(dataFileId), activeEmbargoDate, apiToken); - createFileEmbargoResponse.then().assertThat().statusCode(OK.getStatusCode()); - - // Getting the file counts and assert each count - Response getVersionFileCountsResponse = UtilIT.getVersionFileCounts(datasetId, ":latest", apiToken); - - getVersionFileCountsResponse.then().assertThat().statusCode(OK.getStatusCode()); - - JsonPath responseJsonPath = getVersionFileCountsResponse.jsonPath(); - LinkedHashMap responseCountPerContentTypeMap = responseJsonPath.get("data.perContentType"); - LinkedHashMap responseCountPerCategoryNameMap = 
responseJsonPath.get("data.perCategoryName"); - LinkedHashMap responseCountPerAccessStatusMap = responseJsonPath.get("data.perAccessStatus"); - - assertEquals(4, (Integer) responseJsonPath.get("data.total")); - assertEquals(2, responseCountPerContentTypeMap.get("image/png")); - assertEquals(2, responseCountPerContentTypeMap.get("text/plain")); - assertEquals(1, responseCountPerCategoryNameMap.get(testCategory)); - assertEquals(3, responseCountPerAccessStatusMap.get(DatasetVersionFilesServiceBean.DataFileAccessStatus.Public.toString())); - assertEquals(1, responseCountPerAccessStatusMap.get(DatasetVersionFilesServiceBean.DataFileAccessStatus.EmbargoedThenPublic.toString())); } } diff --git a/src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java b/src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java index bfe856a8d18..0a16bca7008 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java @@ -2,8 +2,6 @@ import io.restassured.RestAssured; import io.restassured.response.Response; - -import java.util.List; import java.util.logging.Logger; import edu.harvard.iq.dataverse.api.auth.ApiKeyAuthMechanism; @@ -32,12 +30,16 @@ import static jakarta.ws.rs.core.Response.Status.*; import org.hamcrest.CoreMatchers; +import static org.hamcrest.CoreMatchers.equalTo; +import static org.hamcrest.CoreMatchers.startsWith; +import static org.hamcrest.CoreMatchers.nullValue; import org.hamcrest.Matchers; import org.junit.jupiter.api.AfterAll; -import static org.hamcrest.CoreMatchers.*; -import static org.hamcrest.CoreMatchers.hasItem; -import static org.junit.jupiter.api.Assertions.*; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertNull; +import static org.junit.jupiter.api.Assertions.assertTrue; public class FilesIT { @@ -2209,137 +2211,4 @@ public void testGetFileDataTables() throws 
InterruptedException { getFileDataTablesForTabularFileResponse = UtilIT.getFileDataTables(testTabularFileId, randomUserApiToken); getFileDataTablesForTabularFileResponse.then().assertThat().statusCode(FORBIDDEN.getStatusCode()); } - - @Test - public void testSetFileCategories() { - Response createUser = UtilIT.createRandomUser(); - createUser.then().assertThat().statusCode(OK.getStatusCode()); - String apiToken = UtilIT.getApiTokenFromResponse(createUser); - - Response createDataverseResponse = UtilIT.createRandomDataverse(apiToken); - createDataverseResponse.then().assertThat().statusCode(CREATED.getStatusCode()); - String dataverseAlias = UtilIT.getAliasFromResponse(createDataverseResponse); - - Response createDatasetResponse = UtilIT.createRandomDatasetViaNativeApi(dataverseAlias, apiToken); - createDatasetResponse.then().assertThat().statusCode(CREATED.getStatusCode()); - int datasetId = JsonPath.from(createDatasetResponse.body().asString()).getInt("data.id"); - - // Upload test file - String pathToTestFile = "src/test/resources/images/coffeeshop.png"; - Response uploadResponse = UtilIT.uploadFileViaNative(Integer.toString(datasetId), pathToTestFile, Json.createObjectBuilder().build(), apiToken); - uploadResponse.then().assertThat().statusCode(OK.getStatusCode()); - - String dataFileId = uploadResponse.getBody().jsonPath().getString("data.files[0].dataFile.id"); - - // Set categories - String testCategory1 = "testCategory1"; - String testCategory2 = "testCategory2"; - List testCategories = List.of(testCategory1, testCategory2); - Response setFileCategoriesResponse = UtilIT.setFileCategories(dataFileId, apiToken, testCategories); - setFileCategoriesResponse.then().assertThat().statusCode(OK.getStatusCode()); - - // Get file data and check for new categories - Response getFileDataResponse = UtilIT.getFileData(dataFileId, apiToken); - getFileDataResponse.prettyPrint(); - getFileDataResponse.then().assertThat() - .body("data.categories", hasItem(testCategory1)) - 
.body("data.categories", hasItem(testCategory2)) - .statusCode(OK.getStatusCode()); - } - - @Test - public void testSetFileTabularTags() throws InterruptedException { - Response createUser = UtilIT.createRandomUser(); - createUser.then().assertThat().statusCode(OK.getStatusCode()); - String apiToken = UtilIT.getApiTokenFromResponse(createUser); - - Response createDataverseResponse = UtilIT.createRandomDataverse(apiToken); - createDataverseResponse.then().assertThat().statusCode(CREATED.getStatusCode()); - String dataverseAlias = UtilIT.getAliasFromResponse(createDataverseResponse); - - Response createDatasetResponse = UtilIT.createRandomDatasetViaNativeApi(dataverseAlias, apiToken); - createDatasetResponse.then().assertThat().statusCode(CREATED.getStatusCode()); - int datasetId = JsonPath.from(createDatasetResponse.body().asString()).getInt("data.id"); - - // Upload tabular file - String pathToTabularTestFile = "src/test/resources/tab/test.tab"; - Response uploadTabularFileResponse = UtilIT.uploadFileViaNative(Integer.toString(datasetId), pathToTabularTestFile, Json.createObjectBuilder().build(), apiToken); - uploadTabularFileResponse.then().assertThat().statusCode(OK.getStatusCode()); - - String tabularFileId = uploadTabularFileResponse.getBody().jsonPath().getString("data.files[0].dataFile.id"); - - // Ensure tabular file is ingested - sleep(2000); - - // Set tabular tags - String testTabularTag1 = "Survey"; - String testTabularTag2 = "Genomics"; - List testTabularTags = List.of(testTabularTag1, testTabularTag2); - Response setFileTabularTagsResponse = UtilIT.setFileTabularTags(tabularFileId, apiToken, testTabularTags); - setFileTabularTagsResponse.then().assertThat().statusCode(OK.getStatusCode()); - - // Get file data and check for new categories - Response getFileDataResponse = UtilIT.getFileData(tabularFileId, apiToken); - getFileDataResponse.then().assertThat() - .body("data.dataFile.tabularTags", hasItem(testTabularTag1)) - 
.body("data.dataFile.tabularTags", hasItem(testTabularTag2)) - .statusCode(OK.getStatusCode()); - - // Set invalid tabular tag - String testInvalidTabularTag = "Invalid"; - setFileTabularTagsResponse = UtilIT.setFileTabularTags(tabularFileId, apiToken, List.of(testInvalidTabularTag)); - setFileTabularTagsResponse.then().assertThat().statusCode(BAD_REQUEST.getStatusCode()); - - // Get file data and check categories are unaltered - getFileDataResponse = UtilIT.getFileData(tabularFileId, apiToken); - getFileDataResponse.then().assertThat() - .body("data.dataFile.tabularTags", hasItem(testTabularTag1)) - .body("data.dataFile.tabularTags", hasItem(testTabularTag2)) - .statusCode(OK.getStatusCode()); - } - - @Test - public void testGetHasBeenDeleted() { - Response createUser = UtilIT.createRandomUser(); - createUser.then().assertThat().statusCode(OK.getStatusCode()); - String apiToken = UtilIT.getApiTokenFromResponse(createUser); - - Response createDataverseResponse = UtilIT.createRandomDataverse(apiToken); - createDataverseResponse.then().assertThat().statusCode(CREATED.getStatusCode()); - String dataverseAlias = UtilIT.getAliasFromResponse(createDataverseResponse); - - Response createDatasetResponse = UtilIT.createRandomDatasetViaNativeApi(dataverseAlias, apiToken); - createDatasetResponse.then().assertThat().statusCode(CREATED.getStatusCode()); - int datasetId = JsonPath.from(createDatasetResponse.body().asString()).getInt("data.id"); - - // Upload test file - String pathToTestFile = "src/test/resources/images/coffeeshop.png"; - Response uploadResponse = UtilIT.uploadFileViaNative(Integer.toString(datasetId), pathToTestFile, Json.createObjectBuilder().build(), apiToken); - uploadResponse.then().assertThat().statusCode(OK.getStatusCode()); - - String dataFileId = uploadResponse.getBody().jsonPath().getString("data.files[0].dataFile.id"); - - // Publish dataverse and dataset - Response publishDataverseResponse = UtilIT.publishDataverseViaNativeApi(dataverseAlias, 
apiToken); - publishDataverseResponse.then().assertThat().statusCode(OK.getStatusCode()); - - Response publishDatasetResponse = UtilIT.publishDatasetViaNativeApi(datasetId, "major", apiToken); - publishDatasetResponse.then().assertThat().statusCode(OK.getStatusCode()); - - // Assert that the file has not been deleted - Response getHasBeenDeletedResponse = UtilIT.getHasBeenDeleted(dataFileId, apiToken); - getHasBeenDeletedResponse.then().assertThat().statusCode(OK.getStatusCode()); - boolean fileHasBeenDeleted = JsonPath.from(getHasBeenDeletedResponse.body().asString()).getBoolean("data"); - assertFalse(fileHasBeenDeleted); - - // Delete test file - Response deleteFileInDatasetResponse = UtilIT.deleteFileInDataset(Integer.parseInt(dataFileId), apiToken); - deleteFileInDatasetResponse.then().assertThat().statusCode(OK.getStatusCode()); - - // Assert that the file has been deleted - getHasBeenDeletedResponse = UtilIT.getHasBeenDeleted(dataFileId, apiToken); - getHasBeenDeletedResponse.then().assertThat().statusCode(OK.getStatusCode()); - fileHasBeenDeleted = JsonPath.from(getHasBeenDeletedResponse.body().asString()).getBoolean("data"); - assertTrue(fileHasBeenDeleted); - } } diff --git a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java index 8e333451c8d..f61b392c898 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java @@ -3276,7 +3276,7 @@ static Response getDatasetVersionCitation(Integer datasetId, String version, Str return response; } - static Response getVersionFiles(Integer datasetId, String version, Integer limit, Integer offset, String contentType, String accessStatus, String categoryName, String tabularTagName, String searchText, String orderCriteria, String apiToken) { + static Response getVersionFiles(Integer datasetId, String version, Integer limit, Integer offset, String orderCriteria, String apiToken) { 
RequestSpecification requestSpecification = given() .header(API_TOKEN_HTTP_HEADER, apiToken) .contentType("application/json"); @@ -3286,21 +3286,6 @@ static Response getVersionFiles(Integer datasetId, String version, Integer limit if (offset != null) { requestSpecification = requestSpecification.queryParam("offset", offset); } - if (contentType != null) { - requestSpecification = requestSpecification.queryParam("contentType", contentType); - } - if (accessStatus != null) { - requestSpecification = requestSpecification.queryParam("accessStatus", accessStatus); - } - if (categoryName != null) { - requestSpecification = requestSpecification.queryParam("categoryName", categoryName); - } - if (tabularTagName != null) { - requestSpecification = requestSpecification.queryParam("tabularTagName", tabularTagName); - } - if (searchText != null) { - requestSpecification = requestSpecification.queryParam("searchText", searchText); - } if (orderCriteria != null) { requestSpecification = requestSpecification.queryParam("orderCriteria", orderCriteria); } @@ -3332,75 +3317,9 @@ static Response getFileDataTables(String dataFileId, String apiToken) { .get("/api/files/" + dataFileId + "/dataTables"); } - static Response getUserFileAccessRequested(String dataFileId, String apiToken) { - return given() - .header(API_TOKEN_HTTP_HEADER, apiToken) - .get("/api/access/datafile/" + dataFileId + "/userFileAccessRequested"); - } - static Response getUserPermissionsOnFile(String dataFileId, String apiToken) { return given() .header(API_TOKEN_HTTP_HEADER, apiToken) .get("/api/access/datafile/" + dataFileId + "/userPermissions"); } - - static Response createFileEmbargo(Integer datasetId, Integer fileId, String dateAvailable, String apiToken) { - JsonObjectBuilder jsonBuilder = Json.createObjectBuilder(); - jsonBuilder.add("dateAvailable", dateAvailable); - jsonBuilder.add("reason", "This is a test embargo"); - jsonBuilder.add("fileIds", Json.createArrayBuilder().add(fileId)); - String jsonString 
= jsonBuilder.build().toString(); - return given() - .header(API_TOKEN_HTTP_HEADER, apiToken) - .body(jsonString) - .contentType("application/json") - .urlEncodingEnabled(false) - .post("/api/datasets/" + datasetId + "/files/actions/:set-embargo"); - } - - static Response getVersionFileCounts(Integer datasetId, String version, String apiToken) { - return given() - .header(API_TOKEN_HTTP_HEADER, apiToken) - .get("/api/datasets/" + datasetId + "/versions/" + version + "/files/counts"); - } - - static Response setFileCategories(String dataFileId, String apiToken, List categories) { - JsonArrayBuilder jsonArrayBuilder = Json.createArrayBuilder(); - for (String category : categories) { - jsonArrayBuilder.add(category); - } - JsonObjectBuilder jsonObjectBuilder = Json.createObjectBuilder(); - jsonObjectBuilder.add("categories", jsonArrayBuilder); - String jsonString = jsonObjectBuilder.build().toString(); - return given() - .header(API_TOKEN_HTTP_HEADER, apiToken) - .body(jsonString) - .post("/api/files/" + dataFileId + "/metadata/categories"); - } - - static Response setFileTabularTags(String dataFileId, String apiToken, List tabularTags) { - JsonArrayBuilder jsonArrayBuilder = Json.createArrayBuilder(); - for (String tabularTag : tabularTags) { - jsonArrayBuilder.add(tabularTag); - } - JsonObjectBuilder jsonObjectBuilder = Json.createObjectBuilder(); - jsonObjectBuilder.add("tabularTags", jsonArrayBuilder); - String jsonString = jsonObjectBuilder.build().toString(); - return given() - .header(API_TOKEN_HTTP_HEADER, apiToken) - .body(jsonString) - .post("/api/files/" + dataFileId + "/metadata/tabularTags"); - } - - static Response deleteFileInDataset(Integer fileId, String apiToken) { - return given() - .header(API_TOKEN_HTTP_HEADER, apiToken) - .delete("/api/files/" + fileId); - } - - static Response getHasBeenDeleted(String dataFileId, String apiToken) { - return given() - .header(API_TOKEN_HTTP_HEADER, apiToken) - .get("/api/files/" + dataFileId + "/hasBeenDeleted"); 
- } } From 87393bae0e2f40b41693d7a3cecc64785e81df36 Mon Sep 17 00:00:00 2001 From: GPortas Date: Wed, 4 Oct 2023 15:05:57 +0100 Subject: [PATCH 360/396] Added: docs for filtering by tabular tag in getVersionFiles endpoint --- doc/sphinx-guides/source/api/native-api.rst | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/doc/sphinx-guides/source/api/native-api.rst b/doc/sphinx-guides/source/api/native-api.rst index d4e2e4cd178..f78ae62f3ae 100644 --- a/doc/sphinx-guides/source/api/native-api.rst +++ b/doc/sphinx-guides/source/api/native-api.rst @@ -978,6 +978,14 @@ Usage example: curl "https://demo.dataverse.org/api/datasets/24/versions/1.0/files?categoryName=Data" +Tabular tag name filtering is also optionally supported. To return files to which the requested tabular tag has been added. + +Usage example: + +.. code-block:: bash + + curl "https://demo.dataverse.org/api/datasets/24/versions/1.0/files?tabularTagName=Survey" + Content type filtering is also optionally supported. To return files matching the requested content type. Usage example: From 9f0b8102904bb663dce8c50203d32663550e2095 Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Wed, 4 Oct 2023 10:47:54 -0400 Subject: [PATCH 361/396] more tests and docs #9952, #9953, #9957 --- doc/release-notes/9955-Signposting-updates.md | 8 +++++++- doc/sphinx-guides/source/api/native-api.rst | 2 +- .../edu/harvard/iq/dataverse/api/SignpostingIT.java | 11 +++++++++++ 3 files changed, 19 insertions(+), 2 deletions(-) diff --git a/doc/release-notes/9955-Signposting-updates.md b/doc/release-notes/9955-Signposting-updates.md index 92168231895..db0e27e51c5 100644 --- a/doc/release-notes/9955-Signposting-updates.md +++ b/doc/release-notes/9955-Signposting-updates.md @@ -1 +1,7 @@ -This release fixes several issues (#9952, #9953, #9957) where the Signposting output did not match the Signposting specification. 
\ No newline at end of file +This release fixes several issues (#9952, #9953, #9957) where the Signposting output did not match the Signposting specification. These changes introduce backward-incompatibility, but since Signposting support was added recently (in Dataverse 5.14 in PR #8981), we feel it's best to do this clean up and not support the old implementation that was not fully compliant with the spec. + +To fix #9952, we surround the license info with `<` and `>`. + +To fix #9953, we no longer wrap the response in a `{"status":"OK","data":{` JSON object. This has also been noted in the guides at https://dataverse-guide--9955.org.readthedocs.build/en/9955/api/native-api.html#retrieve-signposting-information + +To fix #9957, we corrected the mime/content type, changing it from `json+ld` to `ld+json`. For backward compatibility, we are still supporting the old one, for now. diff --git a/doc/sphinx-guides/source/api/native-api.rst b/doc/sphinx-guides/source/api/native-api.rst index e181a2a5546..bc186720252 100644 --- a/doc/sphinx-guides/source/api/native-api.rst +++ b/doc/sphinx-guides/source/api/native-api.rst @@ -2200,7 +2200,7 @@ Here is an example of a "Link" header: The URL for linkset information is discoverable under the ``rel="linkset";type="application/linkset+json`` entry in the "Link" header, such as in the example above. -The reponse includes a JSON object conforming to the `Signposting `__ specification. +The reponse includes a JSON object conforming to the `Signposting `__ specification. As part of this conformance, unlike most Dataverse API responses, the output is not wrapped in a ``{"status":"OK","data":{`` object. Signposting is not supported for draft dataset versions. .. 
code-block:: bash diff --git a/src/test/java/edu/harvard/iq/dataverse/api/SignpostingIT.java b/src/test/java/edu/harvard/iq/dataverse/api/SignpostingIT.java index b41f62ae28f..75f514f3398 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/SignpostingIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/SignpostingIT.java @@ -80,6 +80,7 @@ public void testSignposting() { assertTrue(linkHeader.contains(datasetPid)); assertTrue(linkHeader.contains("cite-as")); assertTrue(linkHeader.contains("describedby")); + assertTrue(linkHeader.contains(";rel=\"license\"")); Pattern pattern = Pattern.compile("<([^<]*)> ; rel=\"linkset\";type=\"application\\/linkset\\+json\""); Matcher matcher = pattern.matcher(linkHeader); @@ -101,6 +102,16 @@ public void testSignposting() { assertTrue(lso.getString("anchor").indexOf("/dataset.xhtml?persistentId=" + datasetPid) > 0); assertTrue(lso.containsKey("describedby")); + // Test export URL from link header + // regex inspired by https://stackoverflow.com/questions/68860255/how-to-match-the-closest-opening-and-closing-brackets + Pattern exporterPattern = Pattern.compile("[<\\[][^()\\[\\]]*?exporter=schema.org[^()\\[\\]]*[>\\]]"); + Matcher exporterMatcher = exporterPattern.matcher(linkHeader); + exporterMatcher.find(); + + Response exportDataset = UtilIT.exportDataset(datasetPid, "schema.org"); + exportDataset.prettyPrint(); + exportDataset.then().assertThat().statusCode(OK.getStatusCode()); + } } From 5b3630f4bf68a579cbaaf34cd4abf20e278112cb Mon Sep 17 00:00:00 2001 From: GPortas Date: Wed, 4 Oct 2023 16:22:56 +0100 Subject: [PATCH 362/396] Revert "Revert "Dataset files API extension for filters"" This reverts commit 122565a4f1afac9f5fe0d85a3c6f08517eb530c4. 
--- .../9714-files-api-extension-filters.md | 14 + .../9785-files-api-extension-search-text.md | 3 + .../9834-files-api-extension-counts.md | 6 + ...oad-extension-new-file-access-endpoints.md | 14 + doc/sphinx-guides/source/api/dataaccess.rst | 12 + doc/sphinx-guides/source/api/native-api.rst | 165 +++++++++++- modules/dataverse-parent/pom.xml | 3 + pom.xml | 14 + .../edu/harvard/iq/dataverse/DataFileTag.java | 2 +- .../DatasetVersionFilesServiceBean.java | 241 ++++++++++++++++++ .../dataverse/DatasetVersionServiceBean.java | 74 ------ .../edu/harvard/iq/dataverse/api/Access.java | 59 +++-- .../harvard/iq/dataverse/api/Datasets.java | 50 +++- .../edu/harvard/iq/dataverse/api/Files.java | 90 ++++++- .../iq/dataverse/util/json/JsonPrinter.java | 21 ++ src/main/java/propertyFiles/Bundle.properties | 1 + .../harvard/iq/dataverse/api/AccessIT.java | 47 ++-- .../harvard/iq/dataverse/api/DatasetsIT.java | 240 ++++++++++++++--- .../edu/harvard/iq/dataverse/api/FilesIT.java | 145 ++++++++++- .../edu/harvard/iq/dataverse/api/UtilIT.java | 83 +++++- 20 files changed, 1110 insertions(+), 174 deletions(-) create mode 100644 doc/release-notes/9714-files-api-extension-filters.md create mode 100644 doc/release-notes/9785-files-api-extension-search-text.md create mode 100644 doc/release-notes/9834-files-api-extension-counts.md create mode 100644 doc/release-notes/9851-datafile-payload-extension-new-file-access-endpoints.md create mode 100644 src/main/java/edu/harvard/iq/dataverse/DatasetVersionFilesServiceBean.java diff --git a/doc/release-notes/9714-files-api-extension-filters.md b/doc/release-notes/9714-files-api-extension-filters.md new file mode 100644 index 00000000000..034230efe61 --- /dev/null +++ b/doc/release-notes/9714-files-api-extension-filters.md @@ -0,0 +1,14 @@ +The getVersionFiles endpoint (/api/datasets/{id}/versions/{versionId}/files) has been extended to support optional filtering by: + +- Access status: through the `accessStatus` query parameter, which supports 
the following values: + + - Public + - Restricted + - EmbargoedThenRestricted + - EmbargoedThenPublic + + +- Category name: through the `categoryName` query parameter. To return files to which the particular category has been added. + + +- Content type: through the `contentType` query parameter. To return files matching the requested content type. For example: "image/png". diff --git a/doc/release-notes/9785-files-api-extension-search-text.md b/doc/release-notes/9785-files-api-extension-search-text.md new file mode 100644 index 00000000000..fb185e1c7af --- /dev/null +++ b/doc/release-notes/9785-files-api-extension-search-text.md @@ -0,0 +1,3 @@ +The getVersionFiles endpoint (/api/datasets/{id}/versions/{versionId}/files) has been extended to support optional filtering by search text through the `searchText` query parameter. + +The search will be applied to the labels and descriptions of the dataset files. diff --git a/doc/release-notes/9834-files-api-extension-counts.md b/doc/release-notes/9834-files-api-extension-counts.md new file mode 100644 index 00000000000..3ec15d8bd36 --- /dev/null +++ b/doc/release-notes/9834-files-api-extension-counts.md @@ -0,0 +1,6 @@ +Implemented the following new endpoints: + +- getVersionFileCounts (/api/datasets/{id}/versions/{versionId}/files/counts): Given a dataset and its version, retrieves file counts based on different criteria (Total count, per content type, per access status and per category name). + + +- setFileCategories (/api/files/{id}/metadata/categories): Updates the categories (by name) for an existing file. If the specified categories do not exist, they will be created. 
diff --git a/doc/release-notes/9851-datafile-payload-extension-new-file-access-endpoints.md b/doc/release-notes/9851-datafile-payload-extension-new-file-access-endpoints.md new file mode 100644 index 00000000000..f306ae2ab80 --- /dev/null +++ b/doc/release-notes/9851-datafile-payload-extension-new-file-access-endpoints.md @@ -0,0 +1,14 @@ +Implemented the following new endpoints: + +- userFileAccessRequested (/api/access/datafile/{id}/userFileAccessRequested): Returns true or false depending on whether or not the calling user has requested access to a particular file. + + +- hasBeenDeleted (/api/files/{id}/hasBeenDeleted): Know if a particular file that existed in a previous version of the dataset no longer exists in the latest version. + + +In addition, the DataFile API payload has been extended to include the following fields: + +- tabularData: Boolean field to know if the DataFile is of tabular type + + +- fileAccessRequest: Boolean field to know if the file access requests are enabled on the Dataset (DataFile owner) diff --git a/doc/sphinx-guides/source/api/dataaccess.rst b/doc/sphinx-guides/source/api/dataaccess.rst index 21be702d62b..6edd413b7a5 100755 --- a/doc/sphinx-guides/source/api/dataaccess.rst +++ b/doc/sphinx-guides/source/api/dataaccess.rst @@ -404,6 +404,18 @@ A curl example using an ``id``:: curl -H "X-Dataverse-key:$API_TOKEN" -X GET http://$SERVER/api/access/datafile/{id}/listRequests +User Has Requested Access to a File: +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +``/api/access/datafile/{id}/userFileAccessRequested`` + +This method returns true or false depending on whether or not the calling user has requested access to a particular file. 
+ +A curl example using an ``id``:: + + curl -H "X-Dataverse-key:$API_TOKEN" -X GET "http://$SERVER/api/access/datafile/{id}/userFileAccessRequested" + + Get User Permissions on a File: ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ diff --git a/doc/sphinx-guides/source/api/native-api.rst b/doc/sphinx-guides/source/api/native-api.rst index 5c72937eecb..cf869d338ca 100644 --- a/doc/sphinx-guides/source/api/native-api.rst +++ b/doc/sphinx-guides/source/api/native-api.rst @@ -970,6 +970,45 @@ This endpoint supports optional pagination, through the ``limit`` and ``offset`` curl "https://demo.dataverse.org/api/datasets/24/versions/1.0/files?limit=10&offset=20" +Category name filtering is also optionally supported. To return files to which the requested category has been added. + +Usage example: + +.. code-block:: bash + + curl "https://demo.dataverse.org/api/datasets/24/versions/1.0/files?categoryName=Data" + +Content type filtering is also optionally supported. To return files matching the requested content type. + +Usage example: + +.. code-block:: bash + + curl "https://demo.dataverse.org/api/datasets/24/versions/1.0/files?contentType=image/png" + +Filtering by search text is also optionally supported. The search will be applied to the labels and descriptions of the dataset files, to return the files that contain the text searched in one of such fields. + +Usage example: + +.. code-block:: bash + + curl "https://demo.dataverse.org/api/datasets/24/versions/1.0/files?searchText=word" + +File access filtering is also optionally supported. In particular, by the following possible values: + +* ``Public`` +* ``Restricted`` +* ``EmbargoedThenRestricted`` +* ``EmbargoedThenPublic`` + +If no filter is specified, the files will match all of the above categories. + +Usage example: + +.. code-block:: bash + + curl "https://demo.dataverse.org/api/datasets/24/versions/1.0/files?accessStatus=Public" + Ordering criteria for sorting the results is also optionally supported. 
In particular, by the following possible values: * ``NameAZ`` (Default) @@ -979,14 +1018,42 @@ Ordering criteria for sorting the results is also optionally supported. In parti * ``Size`` * ``Type`` -Please note that these values are case sensitive and must be correctly typed for the endpoint to recognize them. - Usage example: .. code-block:: bash curl "https://demo.dataverse.org/api/datasets/24/versions/1.0/files?orderCriteria=Newest" +Please note that both filtering and ordering criteria values are case sensitive and must be correctly typed for the endpoint to recognize them. + +Keep in mind that you can combine all of the above query params depending on the results you are looking for. + +Get File Counts in a Dataset +~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +Get file counts, for the given dataset and version. + +The returned file counts are based on different criteria: + +- Total (The total file count) +- Per content type +- Per category name +- Per access status (Possible values: Public, Restricted, EmbargoedThenRestricted, EmbargoedThenPublic) + +.. code-block:: bash + + export SERVER_URL=https://demo.dataverse.org + export ID=24 + export VERSION=1.0 + + curl "$SERVER_URL/api/datasets/$ID/versions/$VERSION/files/counts" + +The fully expanded example above (without environment variables) looks like this: + +.. code-block:: bash + + curl "https://demo.dataverse.org/api/datasets/24/versions/1.0/files/counts" + View Dataset Files and Folders as a Directory Index ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ @@ -2832,13 +2899,13 @@ A curl example using an ``ID`` export SERVER_URL=https://demo.dataverse.org export ID=24 - curl "$SERVER_URL/api/files/$ID/downloadCount" + curl -H "X-Dataverse-key:$API_TOKEN" -X GET "$SERVER_URL/api/files/$ID/downloadCount" The fully expanded example above (without environment variables) looks like this: .. 
code-block:: bash - curl "https://demo.dataverse.org/api/files/24/downloadCount" + curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X GET "https://demo.dataverse.org/api/files/24/downloadCount" A curl example using a ``PERSISTENT_ID`` @@ -2848,16 +2915,53 @@ A curl example using a ``PERSISTENT_ID`` export SERVER_URL=https://demo.dataverse.org export PERSISTENT_ID=doi:10.5072/FK2/AAA000 - curl "$SERVER_URL/api/files/:persistentId/downloadCount?persistentId=$PERSISTENT_ID" + curl -H "X-Dataverse-key:$API_TOKEN" -X GET "$SERVER_URL/api/files/:persistentId/downloadCount?persistentId=$PERSISTENT_ID" The fully expanded example above (without environment variables) looks like this: .. code-block:: bash - curl "https://demo.dataverse.org/api/files/:persistentId/downloadCount?persistentId=doi:10.5072/FK2/AAA000" + curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X GET "https://demo.dataverse.org/api/files/:persistentId/downloadCount?persistentId=doi:10.5072/FK2/AAA000" If you are interested in download counts for multiple files, see :doc:`/api/metrics`. +File Has Been Deleted +~~~~~~~~~~~~~~~~~~~~~ + +Know if a particular file that existed in a previous version of the dataset no longer exists in the latest version. + +A curl example using an ``ID`` + +.. code-block:: bash + + export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx + export SERVER_URL=https://demo.dataverse.org + export ID=24 + + curl -H "X-Dataverse-key:$API_TOKEN" -X GET "$SERVER_URL/api/files/$ID/hasBeenDeleted" + +The fully expanded example above (without environment variables) looks like this: + +.. code-block:: bash + + curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X GET "https://demo.dataverse.org/api/files/24/hasBeenDeleted" + +A curl example using a ``PERSISTENT_ID`` + +.. 
code-block:: bash + + export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx + export SERVER_URL=https://demo.dataverse.org + export PERSISTENT_ID=doi:10.5072/FK2/AAA000 + + curl -H "X-Dataverse-key:$API_TOKEN" -X GET "$SERVER_URL/api/files/:persistentId/hasBeenDeleted?persistentId=$PERSISTENT_ID" + +The fully expanded example above (without environment variables) looks like this: + +.. code-block:: bash + + curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X GET "https://demo.dataverse.org/api/files/:persistentId/hasBeenDeleted?persistentId=doi:10.5072/FK2/AAA000" + Updating File Metadata ~~~~~~~~~~~~~~~~~~~~~~ @@ -2907,6 +3011,55 @@ Also note that dataFileTags are not versioned and changes to these will update t .. _EditingVariableMetadata: +Updating File Metadata Categories +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +Updates the categories for an existing file where ``ID`` is the database id of the file to update or ``PERSISTENT_ID`` is the persistent id (DOI or Handle) of the file. Requires a ``jsonString`` expressing the category names. + +Although updating categories can also be done with the previous endpoint, this has been created to be more practical when it is only necessary to update categories and not other metadata fields. + +A curl example using an ``ID`` + +.. code-block:: bash + + export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx + export SERVER_URL=https://demo.dataverse.org + export ID=24 + + curl -H "X-Dataverse-key:$API_TOKEN" -X POST \ + -F 'jsonData={"categories":["Category1","Category2"]}' \ + "$SERVER_URL/api/files/$ID/metadata/categories" + +The fully expanded example above (without environment variables) looks like this: + +.. code-block:: bash + + curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X POST \ + -F 'jsonData={"categories":["Category1","Category2"]}' \ + "http://demo.dataverse.org/api/files/24/metadata/categories" + +A curl example using a ``PERSISTENT_ID`` + +.. 
code-block:: bash + + export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx + export SERVER_URL=https://demo.dataverse.org + export PERSISTENT_ID=doi:10.5072/FK2/AAA000 + + curl -H "X-Dataverse-key:$API_TOKEN" -X POST \ + -F 'jsonData={"categories":["Category1","Category2"]}' \ + "$SERVER_URL/api/files/:persistentId/metadata/categories?persistentId=$PERSISTENT_ID" + +The fully expanded example above (without environment variables) looks like this: + +.. code-block:: bash + + curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X POST \ + -F 'jsonData={"categories":["Category1","Category2"]}' \ + "https://demo.dataverse.org/api/files/:persistentId/metadata/categories?persistentId=doi:10.5072/FK2/AAA000" + +Note that if the specified categories do not exist, they will be created. + Editing Variable Level Metadata ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ diff --git a/modules/dataverse-parent/pom.xml b/modules/dataverse-parent/pom.xml index bfa11af6c70..8e0ff2887df 100644 --- a/modules/dataverse-parent/pom.xml +++ b/modules/dataverse-parent/pom.xml @@ -200,6 +200,9 @@ 0.43.4 + + + 5.0.0 diff --git a/pom.xml b/pom.xml index e70b723cad5..909e9ee9b80 100644 --- a/pom.xml +++ b/pom.xml @@ -252,6 +252,20 @@ expressly provided + + + com.querydsl + querydsl-apt + ${querydsl.version} + jakarta + provided + + + com.querydsl + querydsl-jpa + ${querydsl.version} + jakarta + commons-io diff --git a/src/main/java/edu/harvard/iq/dataverse/DataFileTag.java b/src/main/java/edu/harvard/iq/dataverse/DataFileTag.java index f4f66d3c874..351c4032939 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DataFileTag.java +++ b/src/main/java/edu/harvard/iq/dataverse/DataFileTag.java @@ -58,7 +58,7 @@ public enum TagType {Survey, TimeSeries, Panel, Event, Genomics, Network, Geospa private static final Map TagTypeToLabels = new HashMap<>(); - private static final Map TagLabelToTypes = new HashMap<>(); + public static final Map TagLabelToTypes = new HashMap<>(); static { diff --git 
a/src/main/java/edu/harvard/iq/dataverse/DatasetVersionFilesServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DatasetVersionFilesServiceBean.java new file mode 100644 index 00000000000..6006d937100 --- /dev/null +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetVersionFilesServiceBean.java @@ -0,0 +1,241 @@ +package edu.harvard.iq.dataverse; + +import edu.harvard.iq.dataverse.QDataFileCategory; +import edu.harvard.iq.dataverse.QDataFileTag; +import edu.harvard.iq.dataverse.QDvObject; +import edu.harvard.iq.dataverse.QEmbargo; +import edu.harvard.iq.dataverse.QFileMetadata; + +import com.querydsl.core.Tuple; +import com.querydsl.core.types.dsl.BooleanExpression; +import com.querydsl.core.types.dsl.CaseBuilder; +import com.querydsl.core.types.dsl.DateExpression; +import com.querydsl.core.types.dsl.DateTimeExpression; + +import com.querydsl.jpa.impl.JPAQuery; +import com.querydsl.jpa.impl.JPAQueryFactory; + +import jakarta.ejb.Stateless; +import jakarta.inject.Named; +import jakarta.persistence.EntityManager; +import jakarta.persistence.PersistenceContext; + +import java.io.Serializable; +import java.sql.Timestamp; +import java.time.LocalDate; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +import static edu.harvard.iq.dataverse.DataFileTag.TagLabelToTypes; + +@Stateless +@Named +public class DatasetVersionFilesServiceBean implements Serializable { + + @PersistenceContext(unitName = "VDCNet-ejbPU") + private EntityManager em; + + private final QFileMetadata fileMetadata = QFileMetadata.fileMetadata; + private final QDvObject dvObject = QDvObject.dvObject; + private final QDataFileCategory dataFileCategory = QDataFileCategory.dataFileCategory; + private final QDataFileTag dataFileTag = QDataFileTag.dataFileTag; + + /** + * Different criteria to sort the results of FileMetadata queries used in {@link DatasetVersionFilesServiceBean#getFileMetadatas} + */ + public enum FileMetadatasOrderCriteria { + NameAZ, NameZA, Newest, Oldest, 
Size, Type + } + + /** + * Status of the particular DataFile based on active embargoes and restriction state used in {@link DatasetVersionFilesServiceBean#getFileMetadatas} + */ + public enum DataFileAccessStatus { + Public, Restricted, EmbargoedThenRestricted, EmbargoedThenPublic + } + + /** + * Given a DatasetVersion, returns its total file metadata count + * + * @param datasetVersion the DatasetVersion to access + * @return long value of total file metadata count + */ + public long getFileMetadataCount(DatasetVersion datasetVersion) { + JPAQueryFactory queryFactory = new JPAQueryFactory(em); + return queryFactory.selectFrom(fileMetadata).where(fileMetadata.datasetVersion.id.eq(datasetVersion.getId())).stream().count(); + } + + /** + * Given a DatasetVersion, returns its file metadata count per content type + * + * @param datasetVersion the DatasetVersion to access + * @return Map of file metadata counts per content type + */ + public Map getFileMetadataCountPerContentType(DatasetVersion datasetVersion) { + JPAQueryFactory queryFactory = new JPAQueryFactory(em); + List contentTypeOccurrences = queryFactory + .select(fileMetadata.dataFile.contentType, fileMetadata.count()) + .from(fileMetadata) + .where(fileMetadata.datasetVersion.id.eq(datasetVersion.getId())) + .groupBy(fileMetadata.dataFile.contentType).fetch(); + Map result = new HashMap<>(); + for (Tuple occurrence : contentTypeOccurrences) { + result.put(occurrence.get(fileMetadata.dataFile.contentType), occurrence.get(fileMetadata.count())); + } + return result; + } + + /** + * Given a DatasetVersion, returns its file metadata count per category name + * + * @param datasetVersion the DatasetVersion to access + * @return Map of file metadata counts per category name + */ + public Map getFileMetadataCountPerCategoryName(DatasetVersion datasetVersion) { + JPAQueryFactory queryFactory = new JPAQueryFactory(em); + List categoryNameOccurrences = queryFactory + .select(dataFileCategory.name, fileMetadata.count()) 
+ .from(dataFileCategory, fileMetadata) + .where(fileMetadata.datasetVersion.id.eq(datasetVersion.getId()).and(fileMetadata.fileCategories.contains(dataFileCategory))) + .groupBy(dataFileCategory.name).fetch(); + Map result = new HashMap<>(); + for (Tuple occurrence : categoryNameOccurrences) { + result.put(occurrence.get(dataFileCategory.name), occurrence.get(fileMetadata.count())); + } + return result; + } + + /** + * Given a DatasetVersion, returns its file metadata count per DataFileAccessStatus + * + * @param datasetVersion the DatasetVersion to access + * @return Map of file metadata counts per DataFileAccessStatus + */ + public Map getFileMetadataCountPerAccessStatus(DatasetVersion datasetVersion) { + Map allCounts = new HashMap<>(); + addAccessStatusCountToTotal(datasetVersion, allCounts, DataFileAccessStatus.Public); + addAccessStatusCountToTotal(datasetVersion, allCounts, DataFileAccessStatus.Restricted); + addAccessStatusCountToTotal(datasetVersion, allCounts, DataFileAccessStatus.EmbargoedThenPublic); + addAccessStatusCountToTotal(datasetVersion, allCounts, DataFileAccessStatus.EmbargoedThenRestricted); + return allCounts; + } + + /** + * Returns a FileMetadata list of files in the specified DatasetVersion + * + * @param datasetVersion the DatasetVersion to access + * @param limit for pagination, can be null + * @param offset for pagination, can be null + * @param contentType for retrieving only files with this content type + * @param accessStatus for retrieving only files with this DataFileAccessStatus + * @param categoryName for retrieving only files categorized with this category name + * @param tabularTagName for retrieving only files categorized with this tabular tag name + * @param searchText for retrieving only files that contain the specified text within their labels or descriptions + * @param orderCriteria a FileMetadatasOrderCriteria to order the results + * @return a FileMetadata list from the specified DatasetVersion + */ + public List 
getFileMetadatas(DatasetVersion datasetVersion, Integer limit, Integer offset, String contentType, DataFileAccessStatus accessStatus, String categoryName, String tabularTagName, String searchText, FileMetadatasOrderCriteria orderCriteria) { + JPAQuery baseQuery = createGetFileMetadatasBaseQuery(datasetVersion, orderCriteria); + + if (contentType != null) { + baseQuery.where(fileMetadata.dataFile.contentType.eq(contentType)); + } + if (accessStatus != null) { + baseQuery.where(createGetFileMetadatasAccessStatusExpression(accessStatus)); + } + if (categoryName != null) { + baseQuery.from(dataFileCategory).where(dataFileCategory.name.eq(categoryName).and(fileMetadata.fileCategories.contains(dataFileCategory))); + } + if (tabularTagName != null) { + baseQuery.from(dataFileTag).where(dataFileTag.type.eq(TagLabelToTypes.get(tabularTagName)).and(fileMetadata.dataFile.dataFileTags.contains(dataFileTag))); + } + if (searchText != null && !searchText.isEmpty()) { + searchText = searchText.trim().toLowerCase(); + baseQuery.where(fileMetadata.label.lower().contains(searchText).or(fileMetadata.description.lower().contains(searchText))); + } + + applyOrderCriteriaToGetFileMetadatasQuery(baseQuery, orderCriteria); + + if (limit != null) { + baseQuery.limit(limit); + } + if (offset != null) { + baseQuery.offset(offset); + } + + return baseQuery.fetch(); + } + + private void addAccessStatusCountToTotal(DatasetVersion datasetVersion, Map totalCounts, DataFileAccessStatus dataFileAccessStatus) { + long fileMetadataCount = getFileMetadataCountByAccessStatus(datasetVersion, dataFileAccessStatus); + if (fileMetadataCount > 0) { + totalCounts.put(dataFileAccessStatus, fileMetadataCount); + } + } + + private long getFileMetadataCountByAccessStatus(DatasetVersion datasetVersion, DataFileAccessStatus accessStatus) { + JPAQueryFactory queryFactory = new JPAQueryFactory(em); + return queryFactory + .selectFrom(fileMetadata) + 
.where(fileMetadata.datasetVersion.id.eq(datasetVersion.getId()).and(createGetFileMetadatasAccessStatusExpression(accessStatus))) + .stream().count(); + } + + private JPAQuery createGetFileMetadatasBaseQuery(DatasetVersion datasetVersion, FileMetadatasOrderCriteria orderCriteria) { + JPAQueryFactory queryFactory = new JPAQueryFactory(em); + JPAQuery baseQuery = queryFactory.selectFrom(fileMetadata).where(fileMetadata.datasetVersion.id.eq(datasetVersion.getId())); + if (orderCriteria == FileMetadatasOrderCriteria.Newest || orderCriteria == FileMetadatasOrderCriteria.Oldest) { + baseQuery.from(dvObject).where(dvObject.id.eq(fileMetadata.dataFile.id)); + } + return baseQuery; + } + + private BooleanExpression createGetFileMetadatasAccessStatusExpression(DataFileAccessStatus accessStatus) { + QEmbargo embargo = fileMetadata.dataFile.embargo; + BooleanExpression activelyEmbargoedExpression = embargo.dateAvailable.goe(DateExpression.currentDate(LocalDate.class)); + BooleanExpression inactivelyEmbargoedExpression = embargo.isNull(); + BooleanExpression accessStatusExpression; + switch (accessStatus) { + case EmbargoedThenRestricted: + accessStatusExpression = activelyEmbargoedExpression.and(fileMetadata.dataFile.restricted.isTrue()); + break; + case EmbargoedThenPublic: + accessStatusExpression = activelyEmbargoedExpression.and(fileMetadata.dataFile.restricted.isFalse()); + break; + case Restricted: + accessStatusExpression = inactivelyEmbargoedExpression.and(fileMetadata.dataFile.restricted.isTrue()); + break; + case Public: + accessStatusExpression = inactivelyEmbargoedExpression.and(fileMetadata.dataFile.restricted.isFalse()); + break; + default: + throw new IllegalStateException("Unexpected value: " + accessStatus); + } + return accessStatusExpression; + } + + private void applyOrderCriteriaToGetFileMetadatasQuery(JPAQuery query, FileMetadatasOrderCriteria orderCriteria) { + DateTimeExpression orderByLifetimeExpression = new 
CaseBuilder().when(dvObject.publicationDate.isNotNull()).then(dvObject.publicationDate).otherwise(dvObject.createDate); + switch (orderCriteria) { + case NameZA: + query.orderBy(fileMetadata.label.desc()); + break; + case Newest: + query.orderBy(orderByLifetimeExpression.desc()); + break; + case Oldest: + query.orderBy(orderByLifetimeExpression.asc()); + break; + case Size: + query.orderBy(fileMetadata.dataFile.filesize.asc()); + break; + case Type: + query.orderBy(fileMetadata.dataFile.contentType.asc()); + break; + default: + query.orderBy(fileMetadata.label.asc()); + break; + } + } +} diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetVersionServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DatasetVersionServiceBean.java index 6f087f9eabc..5c43001dcb5 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetVersionServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetVersionServiceBean.java @@ -49,22 +49,6 @@ public class DatasetVersionServiceBean implements java.io.Serializable { private static final SimpleDateFormat logFormatter = new SimpleDateFormat("yyyy-MM-dd'T'HH-mm-ss"); - private static final String QUERY_STR_FIND_ALL_FILE_METADATAS_ORDER_BY_LABEL = "SELECT fm FROM FileMetadata fm" - + " WHERE fm.datasetVersion.id=:datasetVersionId" - + " ORDER BY fm.label"; - private static final String QUERY_STR_FIND_ALL_FILE_METADATAS_ORDER_BY_DATE = "SELECT fm FROM FileMetadata fm, DvObject dvo" - + " WHERE fm.datasetVersion.id = :datasetVersionId" - + " AND fm.dataFile.id = dvo.id" - + " ORDER BY CASE WHEN dvo.publicationDate IS NOT NULL THEN dvo.publicationDate ELSE dvo.createDate END"; - private static final String QUERY_STR_FIND_ALL_FILE_METADATAS_ORDER_BY_SIZE = "SELECT fm FROM FileMetadata fm, DataFile df" - + " WHERE fm.datasetVersion.id = :datasetVersionId" - + " AND fm.dataFile.id = df.id" - + " ORDER BY df.filesize"; - private static final String QUERY_STR_FIND_ALL_FILE_METADATAS_ORDER_BY_TYPE = "SELECT fm FROM FileMetadata 
fm, DataFile df" - + " WHERE fm.datasetVersion.id = :datasetVersionId" - + " AND fm.dataFile.id = df.id" - + " ORDER BY df.contentType"; - @EJB DatasetServiceBean datasetService; @@ -166,18 +150,6 @@ public DatasetVersion getDatasetVersion(){ } } // end RetrieveDatasetVersionResponse - /** - * Different criteria to sort the results of FileMetadata queries used in {@link DatasetVersionServiceBean#getFileMetadatas} - */ - public enum FileMetadatasOrderCriteria { - NameAZ, - NameZA, - Newest, - Oldest, - Size, - Type - } - public DatasetVersion find(Object pk) { return em.find(DatasetVersion.class, pk); } @@ -1252,50 +1224,4 @@ public List getUnarchivedDatasetVersions(){ return null; } } // end getUnarchivedDatasetVersions - - /** - * Returns a FileMetadata list of files in the specified DatasetVersion - * - * @param datasetVersion the DatasetVersion to access - * @param limit for pagination, can be null - * @param offset for pagination, can be null - * @param orderCriteria a FileMetadatasOrderCriteria to order the results - * @return a FileMetadata list of the specified DatasetVersion - */ - public List getFileMetadatas(DatasetVersion datasetVersion, Integer limit, Integer offset, FileMetadatasOrderCriteria orderCriteria) { - TypedQuery query = em.createQuery(getQueryStringFromFileMetadatasOrderCriteria(orderCriteria), FileMetadata.class) - .setParameter("datasetVersionId", datasetVersion.getId()); - if (limit != null) { - query.setMaxResults(limit); - } - if (offset != null) { - query.setFirstResult(offset); - } - return query.getResultList(); - } - - private String getQueryStringFromFileMetadatasOrderCriteria(FileMetadatasOrderCriteria orderCriteria) { - String queryString; - switch (orderCriteria) { - case NameZA: - queryString = QUERY_STR_FIND_ALL_FILE_METADATAS_ORDER_BY_LABEL + " DESC"; - break; - case Newest: - queryString = QUERY_STR_FIND_ALL_FILE_METADATAS_ORDER_BY_DATE + " DESC"; - break; - case Oldest: - queryString = 
QUERY_STR_FIND_ALL_FILE_METADATAS_ORDER_BY_DATE; - break; - case Size: - queryString = QUERY_STR_FIND_ALL_FILE_METADATAS_ORDER_BY_SIZE; - break; - case Type: - queryString = QUERY_STR_FIND_ALL_FILE_METADATAS_ORDER_BY_TYPE; - break; - default: - queryString = QUERY_STR_FIND_ALL_FILE_METADATAS_ORDER_BY_LABEL; - break; - } - return queryString; - } } // end class diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Access.java b/src/main/java/edu/harvard/iq/dataverse/api/Access.java index 1aa3f4ffde6..3c226e68472 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Access.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Access.java @@ -1681,7 +1681,47 @@ public Response rejectFileAccess(@Context ContainerRequestContext crc, @PathPara return error(BAD_REQUEST, BundleUtil.getStringFromBundle("access.api.fileAccess.rejectFailure.noRequest", args)); } } - + + @GET + @AuthRequired + @Path("/datafile/{id}/userFileAccessRequested") + public Response getUserFileAccessRequested(@Context ContainerRequestContext crc, @PathParam("id") String dataFileId) { + DataFile dataFile; + AuthenticatedUser requestAuthenticatedUser; + try { + dataFile = findDataFileOrDie(dataFileId); + requestAuthenticatedUser = getRequestAuthenticatedUserOrDie(crc); + } catch (WrappedResponse wr) { + return wr.getResponse(); + } + boolean fileAccessRequested = false; + List requests = dataFile.getFileAccessRequests(); + for (FileAccessRequest fileAccessRequest : requests) { + if (fileAccessRequest.getRequester().getId().equals(requestAuthenticatedUser.getId())) { + fileAccessRequested = true; + break; + } + } + return ok(fileAccessRequested); + } + + @GET + @AuthRequired + @Path("/datafile/{id}/userPermissions") + public Response getUserPermissionsOnFile(@Context ContainerRequestContext crc, @PathParam("id") String dataFileId) { + DataFile dataFile; + try { + dataFile = findDataFileOrDie(dataFileId); + } catch (WrappedResponse wr) { + return wr.getResponse(); + } + JsonObjectBuilder 
jsonObjectBuilder = Json.createObjectBuilder(); + User requestUser = getRequestUser(crc); + jsonObjectBuilder.add("canDownloadFile", fileDownloadService.canDownloadFile(createDataverseRequest(requestUser), dataFile)); + jsonObjectBuilder.add("canEditOwnerDataset", permissionService.userOn(requestUser, dataFile.getOwner()).has(Permission.EditDataset)); + return ok(jsonObjectBuilder); + } + // checkAuthorization is a convenience method; it calls the boolean method // isAccessAuthorized(), the actual workhorse, tand throws a 403 exception if not. @@ -1949,21 +1989,4 @@ private URI handleCustomZipDownload(User user, String customZipServiceUrl, Strin } return redirectUri; } - - @GET - @AuthRequired - @Path("/datafile/{id}/userPermissions") - public Response getUserPermissionsOnFile(@Context ContainerRequestContext crc, @PathParam("id") String dataFileId) { - DataFile dataFile; - try { - dataFile = findDataFileOrDie(dataFileId); - } catch (WrappedResponse wr) { - return wr.getResponse(); - } - JsonObjectBuilder jsonObjectBuilder = Json.createObjectBuilder(); - User requestUser = getRequestUser(crc); - jsonObjectBuilder.add("canDownloadFile", fileDownloadService.canDownloadFile(createDataverseRequest(requestUser), dataFile)); - jsonObjectBuilder.add("canEditOwnerDataset", permissionService.userOn(requestUser, dataFile.getOwner()).has(Permission.EditDataset)); - return ok(jsonObjectBuilder); - } } diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java index 704ec8f1989..b3be55399d8 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java @@ -236,6 +236,9 @@ public class Datasets extends AbstractApiBean { @Inject PrivateUrlServiceBean privateUrlService; + @Inject + DatasetVersionFilesServiceBean datasetVersionFilesServiceBean; + /** * Used to consolidate the way we parse and handle dataset versions. 
* @param @@ -484,23 +487,56 @@ public Response getVersion(@Context ContainerRequestContext crc, @PathParam("id" : ok(json(dsv)); }, getRequestUser(crc)); } - + @GET @AuthRequired @Path("{id}/versions/{versionId}/files") - public Response getVersionFiles(@Context ContainerRequestContext crc, @PathParam("id") String datasetId, @PathParam("versionId") String versionId, @QueryParam("limit") Integer limit, @QueryParam("offset") Integer offset, @QueryParam("orderCriteria") String orderCriteria, @Context UriInfo uriInfo, @Context HttpHeaders headers) { - return response( req -> { + public Response getVersionFiles(@Context ContainerRequestContext crc, + @PathParam("id") String datasetId, + @PathParam("versionId") String versionId, + @QueryParam("limit") Integer limit, + @QueryParam("offset") Integer offset, + @QueryParam("contentType") String contentType, + @QueryParam("accessStatus") String accessStatus, + @QueryParam("categoryName") String categoryName, + @QueryParam("tabularTagName") String tabularTagName, + @QueryParam("searchText") String searchText, + @QueryParam("orderCriteria") String orderCriteria, + @Context UriInfo uriInfo, + @Context HttpHeaders headers) { + return response(req -> { DatasetVersion datasetVersion = getDatasetVersionOrDie(req, versionId, findDatasetOrDie(datasetId), uriInfo, headers); - DatasetVersionServiceBean.FileMetadatasOrderCriteria fileMetadatasOrderCriteria; + DatasetVersionFilesServiceBean.FileMetadatasOrderCriteria fileMetadatasOrderCriteria; try { - fileMetadatasOrderCriteria = orderCriteria != null ? DatasetVersionServiceBean.FileMetadatasOrderCriteria.valueOf(orderCriteria) : DatasetVersionServiceBean.FileMetadatasOrderCriteria.NameAZ; + fileMetadatasOrderCriteria = orderCriteria != null ? 
DatasetVersionFilesServiceBean.FileMetadatasOrderCriteria.valueOf(orderCriteria) : DatasetVersionFilesServiceBean.FileMetadatasOrderCriteria.NameAZ; } catch (IllegalArgumentException e) { return error(Response.Status.BAD_REQUEST, "Invalid order criteria: " + orderCriteria); } - return ok(jsonFileMetadatas(datasetversionService.getFileMetadatas(datasetVersion, limit, offset, fileMetadatasOrderCriteria))); + DatasetVersionFilesServiceBean.DataFileAccessStatus dataFileAccessStatus; + try { + dataFileAccessStatus = accessStatus != null ? DatasetVersionFilesServiceBean.DataFileAccessStatus.valueOf(accessStatus) : null; + } catch (IllegalArgumentException e) { + return error(Response.Status.BAD_REQUEST, "Invalid access status: " + accessStatus); + } + return ok(jsonFileMetadatas(datasetVersionFilesServiceBean.getFileMetadatas(datasetVersion, limit, offset, contentType, dataFileAccessStatus, categoryName, tabularTagName, searchText, fileMetadatasOrderCriteria))); }, getRequestUser(crc)); } - + + @GET + @AuthRequired + @Path("{id}/versions/{versionId}/files/counts") + public Response getVersionFileCounts(@Context ContainerRequestContext crc, @PathParam("id") String datasetId, @PathParam("versionId") String versionId, @Context UriInfo uriInfo, @Context HttpHeaders headers) { + return response(req -> { + DatasetVersion datasetVersion = getDatasetVersionOrDie(req, versionId, findDatasetOrDie(datasetId), uriInfo, headers); + JsonObjectBuilder jsonObjectBuilder = Json.createObjectBuilder(); + jsonObjectBuilder.add("total", datasetVersionFilesServiceBean.getFileMetadataCount(datasetVersion)); + jsonObjectBuilder.add("perContentType", json(datasetVersionFilesServiceBean.getFileMetadataCountPerContentType(datasetVersion))); + jsonObjectBuilder.add("perCategoryName", json(datasetVersionFilesServiceBean.getFileMetadataCountPerCategoryName(datasetVersion))); + jsonObjectBuilder.add("perAccessStatus", 
jsonFileCountPerAccessStatusMap(datasetVersionFilesServiceBean.getFileMetadataCountPerAccessStatus(datasetVersion))); + return ok(jsonObjectBuilder); + }, getRequestUser(crc)); + } + @GET @AuthRequired @Path("{id}/dirindex") diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Files.java b/src/main/java/edu/harvard/iq/dataverse/api/Files.java index 82811162d52..8a9abe68d85 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Files.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Files.java @@ -4,6 +4,7 @@ import com.google.gson.JsonObject; import edu.harvard.iq.dataverse.DataFile; import edu.harvard.iq.dataverse.DataFileServiceBean; +import edu.harvard.iq.dataverse.DataFileTag; import edu.harvard.iq.dataverse.Dataset; import edu.harvard.iq.dataverse.DatasetLock; import edu.harvard.iq.dataverse.DatasetServiceBean; @@ -54,6 +55,7 @@ import java.io.IOException; import java.io.InputStream; +import java.io.StringReader; import java.util.ArrayList; import java.util.Arrays; import java.util.List; @@ -63,15 +65,12 @@ import jakarta.ejb.EJBException; import jakarta.inject.Inject; import jakarta.json.Json; +import jakarta.json.JsonArray; +import jakarta.json.JsonString; +import jakarta.json.JsonValue; +import jakarta.json.stream.JsonParsingException; import jakarta.servlet.http.HttpServletResponse; -import jakarta.ws.rs.Consumes; -import jakarta.ws.rs.DELETE; -import jakarta.ws.rs.GET; -import jakarta.ws.rs.POST; -import jakarta.ws.rs.PUT; -import jakarta.ws.rs.Path; -import jakarta.ws.rs.PathParam; -import jakarta.ws.rs.QueryParam; +import jakarta.ws.rs.*; import jakarta.ws.rs.container.ContainerRequestContext; import jakarta.ws.rs.core.Context; import jakarta.ws.rs.core.HttpHeaders; @@ -112,6 +111,8 @@ public class Files extends AbstractApiBean { MakeDataCountLoggingServiceBean mdcLogService; @Inject GuestbookResponseServiceBean guestbookResponseService; + @Inject + DataFileServiceBean dataFileServiceBean; private static final Logger logger = 
Logger.getLogger(Files.class.getName()); @@ -852,18 +853,85 @@ public Response getFileDataTables(@Context ContainerRequestContext crc, @PathPar try { dataFile = findDataFileOrDie(dataFileId); } catch (WrappedResponse e) { - return error(Response.Status.NOT_FOUND, "File not found for given id."); + return notFound("File not found for given id."); } if (dataFile.isRestricted() || FileUtil.isActivelyEmbargoed(dataFile)) { DataverseRequest dataverseRequest = createDataverseRequest(getRequestUser(crc)); boolean hasPermissionToDownloadFile = permissionSvc.requestOn(dataverseRequest, dataFile).has(Permission.DownloadFile); if (!hasPermissionToDownloadFile) { - return error(FORBIDDEN, "Insufficient permissions to access the requested information."); + return forbidden("Insufficient permissions to access the requested information."); } } if (!dataFile.isTabularData()) { - return error(BAD_REQUEST, "This operation is only available for tabular files."); + return badRequest(BundleUtil.getStringFromBundle("files.api.only.tabular.supported")); } return ok(jsonDT(dataFile.getDataTables())); } + + @POST + @AuthRequired + @Path("{id}/metadata/categories") + @Produces(MediaType.APPLICATION_JSON) + public Response setFileCategories(@Context ContainerRequestContext crc, @PathParam("id") String dataFileId, String jsonBody) { + return response(req -> { + DataFile dataFile = execCommand(new GetDataFileCommand(req, findDataFileOrDie(dataFileId))); + jakarta.json.JsonObject jsonObject; + try (StringReader stringReader = new StringReader(jsonBody)) { + jsonObject = Json.createReader(stringReader).readObject(); + JsonArray requestedCategoriesJson = jsonObject.getJsonArray("categories"); + FileMetadata fileMetadata = dataFile.getFileMetadata(); + for (JsonValue jsonValue : requestedCategoriesJson) { + JsonString jsonString = (JsonString) jsonValue; + fileMetadata.addCategoryByName(jsonString.getString()); + } + execCommand(new 
UpdateDatasetVersionCommand(fileMetadata.getDataFile().getOwner(), req)); + return ok("Categories of file " + dataFileId + " updated."); + } catch (JsonParsingException jpe) { + return badRequest("Error parsing Json: " + jpe.getMessage()); + } + }, getRequestUser(crc)); + } + + @POST + @AuthRequired + @Path("{id}/metadata/tabularTags") + @Produces(MediaType.APPLICATION_JSON) + public Response setFileTabularTags(@Context ContainerRequestContext crc, @PathParam("id") String dataFileId, String jsonBody) { + return response(req -> { + DataFile dataFile = execCommand(new GetDataFileCommand(req, findDataFileOrDie(dataFileId))); + if (!dataFile.isTabularData()) { + return badRequest(BundleUtil.getStringFromBundle("files.api.only.tabular.supported")); + } + jakarta.json.JsonObject jsonObject; + try (StringReader stringReader = new StringReader(jsonBody)) { + jsonObject = Json.createReader(stringReader).readObject(); + JsonArray requestedTabularTagsJson = jsonObject.getJsonArray("tabularTags"); + for (JsonValue jsonValue : requestedTabularTagsJson) { + JsonString jsonString = (JsonString) jsonValue; + DataFileTag tag = new DataFileTag(); + try { + tag.setTypeByLabel(jsonString.getString()); + } catch (IllegalArgumentException iax) { + return badRequest(iax.getMessage()); + } + tag.setDataFile(dataFile); + dataFile.addTag(tag); + } + execCommand(new UpdateDatasetVersionCommand(dataFile.getOwner(), req)); + return ok("Tabular tags of file " + dataFileId + " updated."); + } catch (JsonParsingException jpe) { + return badRequest("Error parsing Json: " + jpe.getMessage()); + } + }, getRequestUser(crc)); + } + + @GET + @AuthRequired + @Path("{id}/hasBeenDeleted") + public Response getHasBeenDeleted(@Context ContainerRequestContext crc, @PathParam("id") String dataFileId) { + return response(req -> { + DataFile dataFile = execCommand(new GetDataFileCommand(req, findDataFileOrDie(dataFileId))); + return ok(dataFileServiceBean.hasBeenDeleted(dataFile)); + }, getRequestUser(crc)); + 
} } diff --git a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java index a7aa36f179e..1fed0b233e4 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java @@ -672,9 +672,14 @@ public static JsonObjectBuilder json(DataFile df, FileMetadata fileMetadata, boo //--------------------------------------------- .add("md5", getMd5IfItExists(df.getChecksumType(), df.getChecksumValue())) .add("checksum", getChecksumTypeAndValue(df.getChecksumType(), df.getChecksumValue())) + .add("tabularData", df.isTabularData()) .add("tabularTags", getTabularFileTags(df)) .add("creationDate", df.getCreateDateFormattedYYYYMMDD()) .add("publicationDate", df.getPublicationDateFormattedYYYYMMDD()); + Dataset dfOwner = df.getOwner(); + if (dfOwner != null) { + builder.add("fileAccessRequest", dfOwner.isFileAccessRequest()); + } /* * The restricted state was not included prior to #9175 so to avoid backward * incompatability, it is now only added when generating json for the @@ -1095,6 +1100,22 @@ public Set characteristics() { }; } + public static JsonObjectBuilder json(Map map) { + JsonObjectBuilder jsonObjectBuilder = Json.createObjectBuilder(); + for (Map.Entry mapEntry : map.entrySet()) { + jsonObjectBuilder.add(mapEntry.getKey(), mapEntry.getValue()); + } + return jsonObjectBuilder; + } + + public static JsonObjectBuilder jsonFileCountPerAccessStatusMap(Map map) { + JsonObjectBuilder jsonObjectBuilder = Json.createObjectBuilder(); + for (Map.Entry mapEntry : map.entrySet()) { + jsonObjectBuilder.add(mapEntry.getKey().toString(), mapEntry.getValue()); + } + return jsonObjectBuilder; + } + public static Collector, JsonArrayBuilder> toJsonArray() { return new Collector, JsonArrayBuilder>() { diff --git a/src/main/java/propertyFiles/Bundle.properties b/src/main/java/propertyFiles/Bundle.properties index 
ac725caf1b2..7b4befcca36 100644 --- a/src/main/java/propertyFiles/Bundle.properties +++ b/src/main/java/propertyFiles/Bundle.properties @@ -2620,6 +2620,7 @@ admin.api.deleteUser.success=Authenticated User {0} deleted. #Files.java files.api.metadata.update.duplicateFile=Filename already exists at {0} files.api.no.draft=No draft available for this file +files.api.only.tabular.supported=This operation is only available for tabular files. #Datasets.java datasets.api.updatePIDMetadata.failure.dataset.must.be.released=Modify Registration Metadata must be run on a published dataset. diff --git a/src/test/java/edu/harvard/iq/dataverse/api/AccessIT.java b/src/test/java/edu/harvard/iq/dataverse/api/AccessIT.java index b1beddd893f..416caa68566 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/AccessIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/AccessIT.java @@ -26,11 +26,9 @@ import static jakarta.ws.rs.core.Response.Status.*; import static org.hamcrest.MatcherAssert.*; -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertNotNull; -import static org.junit.jupiter.api.Assertions.assertTrue; import static org.hamcrest.CoreMatchers.is; import static org.hamcrest.CoreMatchers.not; +import static org.junit.jupiter.api.Assertions.*; /** * @@ -632,28 +630,37 @@ public void testZipUploadAndDownload() throws IOException { } @Test - public void testGetUserPermissionsOnFile() { - Response createUser = UtilIT.createRandomUser(); - createUser.then().assertThat().statusCode(OK.getStatusCode()); - String apiToken = UtilIT.getApiTokenFromResponse(createUser); + public void testGetUserFileAccessRequested() { + // Create new user + Response createUserResponse = UtilIT.createRandomUser(); + createUserResponse.then().assertThat().statusCode(OK.getStatusCode()); + String newUserApiToken = UtilIT.getApiTokenFromResponse(createUserResponse); - Response createDataverseResponse = UtilIT.createRandomDataverse(apiToken); - 
createDataverseResponse.then().assertThat().statusCode(CREATED.getStatusCode()); - String dataverseAlias = UtilIT.getAliasFromResponse(createDataverseResponse); + String dataFileId = Integer.toString(tabFile3IdRestricted); - Response createDatasetResponse = UtilIT.createRandomDatasetViaNativeApi(dataverseAlias, apiToken); - createDatasetResponse.then().assertThat().statusCode(CREATED.getStatusCode()); - int datasetId = JsonPath.from(createDatasetResponse.body().asString()).getInt("data.id"); + // Call with new user and unrequested access file + Response getUserFileAccessRequestedResponse = UtilIT.getUserFileAccessRequested(dataFileId, newUserApiToken); + getUserFileAccessRequestedResponse.then().assertThat().statusCode(OK.getStatusCode()); - // Upload test file - String pathToTestFile = "src/test/resources/images/coffeeshop.png"; - Response uploadResponse = UtilIT.uploadFileViaNative(Integer.toString(datasetId), pathToTestFile, Json.createObjectBuilder().build(), apiToken); - uploadResponse.then().assertThat().statusCode(OK.getStatusCode()); + boolean userFileAccessRequested = JsonPath.from(getUserFileAccessRequestedResponse.body().asString()).getBoolean("data"); + assertFalse(userFileAccessRequested); + + // Request file access for the new user + Response requestFileAccessResponse = UtilIT.requestFileAccess(dataFileId, newUserApiToken); + requestFileAccessResponse.then().assertThat().statusCode(OK.getStatusCode()); - // Assert user permissions on file - int testFileId = JsonPath.from(uploadResponse.body().asString()).getInt("data.files[0].dataFile.id"); - Response getUserPermissionsOnFileResponse = UtilIT.getUserPermissionsOnFile(Integer.toString(testFileId), apiToken); + // Call with new user and requested access file + getUserFileAccessRequestedResponse = UtilIT.getUserFileAccessRequested(dataFileId, newUserApiToken); + getUserFileAccessRequestedResponse.then().assertThat().statusCode(OK.getStatusCode()); + userFileAccessRequested = 
JsonPath.from(getUserFileAccessRequestedResponse.body().asString()).getBoolean("data"); + assertTrue(userFileAccessRequested); + } + + @Test + public void testGetUserPermissionsOnFile() { + // Call with valid file id + Response getUserPermissionsOnFileResponse = UtilIT.getUserPermissionsOnFile(Integer.toString(basicFileId), apiToken); getUserPermissionsOnFileResponse.then().assertThat().statusCode(OK.getStatusCode()); boolean canDownloadFile = JsonPath.from(getUserPermissionsOnFileResponse.body().asString()).getBoolean("data.canDownloadFile"); assertTrue(canDownloadFile); diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java index 3b6d4d1ecdf..b9f09cc7c07 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java @@ -1,6 +1,6 @@ package edu.harvard.iq.dataverse.api; -import edu.harvard.iq.dataverse.DatasetVersionServiceBean; +import edu.harvard.iq.dataverse.DatasetVersionFilesServiceBean; import io.restassured.RestAssured; import static io.restassured.RestAssured.given; @@ -9,6 +9,9 @@ import io.restassured.http.ContentType; import io.restassured.response.Response; +import java.time.LocalDate; +import java.time.format.DateTimeFormatter; +import java.util.*; import java.util.logging.Logger; import org.junit.jupiter.api.AfterAll; @@ -17,8 +20,6 @@ import org.skyscreamer.jsonassert.JSONAssert; import org.junit.jupiter.api.Disabled; -import java.util.List; -import java.util.Map; import jakarta.json.JsonObject; import static jakarta.ws.rs.core.Response.Status.CREATED; @@ -39,8 +40,6 @@ import edu.harvard.iq.dataverse.authorization.users.PrivateUrlUser; import edu.harvard.iq.dataverse.settings.SettingsServiceBean; -import java.util.UUID; - import org.apache.commons.lang3.StringUtils; import org.apache.commons.lang3.exception.ExceptionUtils; @@ -67,8 +66,7 @@ import java.nio.file.Path; import java.nio.file.Paths; 
import java.nio.file.Files; -import java.util.ArrayList; -import java.util.HashMap; + import jakarta.json.Json; import jakarta.json.JsonArray; import jakarta.json.JsonObjectBuilder; @@ -77,6 +75,7 @@ import javax.xml.stream.XMLStreamException; import javax.xml.stream.XMLStreamReader; +import static java.lang.Thread.sleep; import static org.junit.jupiter.api.Assertions.assertEquals; import org.hamcrest.CoreMatchers; @@ -117,7 +116,9 @@ public static void setUpClass() { Response removeAnonymizedFieldTypeNames = UtilIT.deleteSetting(SettingsServiceBean.Key.AnonymizedFieldTypeNames); removeAnonymizedFieldTypeNames.then().assertThat() .statusCode(200); - + + UtilIT.deleteSetting(SettingsServiceBean.Key.MaxEmbargoDurationInMonths); + /* With Dual mode, we can no longer mess with upload methods since native is now required for anything to work Response removeDcmUrl = UtilIT.deleteSetting(SettingsServiceBean.Key.DataCaptureModuleUrl); @@ -144,7 +145,9 @@ public static void afterClass() { Response removeAnonymizedFieldTypeNames = UtilIT.deleteSetting(SettingsServiceBean.Key.AnonymizedFieldTypeNames); removeAnonymizedFieldTypeNames.then().assertThat() .statusCode(200); - + + UtilIT.deleteSetting(SettingsServiceBean.Key.MaxEmbargoDurationInMonths); + /* See above Response removeDcmUrl = UtilIT.deleteSetting(SettingsServiceBean.Key.DataCaptureModuleUrl); removeDcmUrl.then().assertThat() @@ -3264,8 +3267,7 @@ public void getDatasetVersionCitation() { .body("data.message", containsString("DRAFT VERSION")); } - @Test - public void getVersionFiles() throws IOException { + public void getVersionFiles() throws IOException, InterruptedException { Response createUser = UtilIT.createRandomUser(); createUser.then().assertThat().statusCode(OK.getStatusCode()); String apiToken = UtilIT.getApiTokenFromResponse(createUser); @@ -3297,39 +3299,42 @@ public void getVersionFiles() throws IOException { int testPageSize = 2; // Test page 1 - Response getVersionFilesResponsePaginated = 
UtilIT.getVersionFiles(datasetId, testDatasetVersion, testPageSize, null, null, apiToken); - - int fileMetadatasCount = getVersionFilesResponsePaginated.jsonPath().getList("data").size(); - assertEquals(testPageSize, fileMetadatasCount); + Response getVersionFilesResponsePaginated = UtilIT.getVersionFiles(datasetId, testDatasetVersion, testPageSize, null, null, null, null, null, null, null, apiToken); getVersionFilesResponsePaginated.then().assertThat() .statusCode(OK.getStatusCode()) .body("data[0].label", equalTo(testFileName1)) .body("data[1].label", equalTo(testFileName2)); - // Test page 2 - getVersionFilesResponsePaginated = UtilIT.getVersionFiles(datasetId, testDatasetVersion, testPageSize, testPageSize, null, apiToken); - - fileMetadatasCount = getVersionFilesResponsePaginated.jsonPath().getList("data").size(); + int fileMetadatasCount = getVersionFilesResponsePaginated.jsonPath().getList("data").size(); assertEquals(testPageSize, fileMetadatasCount); + String testFileId1 = JsonPath.from(getVersionFilesResponsePaginated.body().asString()).getString("data[0].dataFile.id"); + String testFileId2 = JsonPath.from(getVersionFilesResponsePaginated.body().asString()).getString("data[1].dataFile.id"); + + // Test page 2 + getVersionFilesResponsePaginated = UtilIT.getVersionFiles(datasetId, testDatasetVersion, testPageSize, testPageSize, null, null, null, null, null, null, apiToken); + getVersionFilesResponsePaginated.then().assertThat() .statusCode(OK.getStatusCode()) .body("data[0].label", equalTo(testFileName3)) .body("data[1].label", equalTo(testFileName4)); - // Test page 3 (last) - getVersionFilesResponsePaginated = UtilIT.getVersionFiles(datasetId, testDatasetVersion, testPageSize, testPageSize * 2, null, apiToken); - fileMetadatasCount = getVersionFilesResponsePaginated.jsonPath().getList("data").size(); - assertEquals(1, fileMetadatasCount); + assertEquals(testPageSize, fileMetadatasCount); + + // Test page 3 (last) + getVersionFilesResponsePaginated = 
UtilIT.getVersionFiles(datasetId, testDatasetVersion, testPageSize, testPageSize * 2, null, null, null, null, null, null, apiToken); getVersionFilesResponsePaginated.then().assertThat() .statusCode(OK.getStatusCode()) .body("data[0].label", equalTo(testFileName5)); + fileMetadatasCount = getVersionFilesResponsePaginated.jsonPath().getList("data").size(); + assertEquals(1, fileMetadatasCount); + // Test NameZA order criteria - Response getVersionFilesResponseNameZACriteria = UtilIT.getVersionFiles(datasetId, testDatasetVersion, null, null, DatasetVersionServiceBean.FileMetadatasOrderCriteria.NameZA.toString(), apiToken); + Response getVersionFilesResponseNameZACriteria = UtilIT.getVersionFiles(datasetId, testDatasetVersion, null, null, null, null, null, null, null, DatasetVersionFilesServiceBean.FileMetadatasOrderCriteria.NameZA.toString(), apiToken); getVersionFilesResponseNameZACriteria.then().assertThat() .statusCode(OK.getStatusCode()) @@ -3340,7 +3345,7 @@ public void getVersionFiles() throws IOException { .body("data[4].label", equalTo(testFileName1)); // Test Newest order criteria - Response getVersionFilesResponseNewestCriteria = UtilIT.getVersionFiles(datasetId, testDatasetVersion, null, null, DatasetVersionServiceBean.FileMetadatasOrderCriteria.Newest.toString(), apiToken); + Response getVersionFilesResponseNewestCriteria = UtilIT.getVersionFiles(datasetId, testDatasetVersion, null, null, null, null, null, null, null, DatasetVersionFilesServiceBean.FileMetadatasOrderCriteria.Newest.toString(), apiToken); getVersionFilesResponseNewestCriteria.then().assertThat() .statusCode(OK.getStatusCode()) @@ -3351,7 +3356,7 @@ public void getVersionFiles() throws IOException { .body("data[4].label", equalTo(testFileName1)); // Test Oldest order criteria - Response getVersionFilesResponseOldestCriteria = UtilIT.getVersionFiles(datasetId, testDatasetVersion, null, null, DatasetVersionServiceBean.FileMetadatasOrderCriteria.Oldest.toString(), apiToken); + Response 
getVersionFilesResponseOldestCriteria = UtilIT.getVersionFiles(datasetId, testDatasetVersion, null, null, null, null, null, null, null, DatasetVersionFilesServiceBean.FileMetadatasOrderCriteria.Oldest.toString(), apiToken); getVersionFilesResponseOldestCriteria.then().assertThat() .statusCode(OK.getStatusCode()) @@ -3362,7 +3367,7 @@ public void getVersionFiles() throws IOException { .body("data[4].label", equalTo(testFileName4)); // Test Size order criteria - Response getVersionFilesResponseSizeCriteria = UtilIT.getVersionFiles(datasetId, testDatasetVersion, null, null, DatasetVersionServiceBean.FileMetadatasOrderCriteria.Size.toString(), apiToken); + Response getVersionFilesResponseSizeCriteria = UtilIT.getVersionFiles(datasetId, testDatasetVersion, null, null, null, null, null, null, null, DatasetVersionFilesServiceBean.FileMetadatasOrderCriteria.Size.toString(), apiToken); getVersionFilesResponseSizeCriteria.then().assertThat() .statusCode(OK.getStatusCode()) @@ -3373,7 +3378,7 @@ public void getVersionFiles() throws IOException { .body("data[4].label", equalTo(testFileName4)); // Test Type order criteria - Response getVersionFilesResponseTypeCriteria = UtilIT.getVersionFiles(datasetId, testDatasetVersion, null, null, DatasetVersionServiceBean.FileMetadatasOrderCriteria.Type.toString(), apiToken); + Response getVersionFilesResponseTypeCriteria = UtilIT.getVersionFiles(datasetId, testDatasetVersion, null, null, null, null, null, null, null, DatasetVersionFilesServiceBean.FileMetadatasOrderCriteria.Type.toString(), apiToken); getVersionFilesResponseTypeCriteria.then().assertThat() .statusCode(OK.getStatusCode()) @@ -3385,9 +3390,186 @@ public void getVersionFiles() throws IOException { // Test invalid order criteria String invalidOrderCriteria = "invalidOrderCriteria"; - Response getVersionFilesResponseInvalidOrderCriteria = UtilIT.getVersionFiles(datasetId, testDatasetVersion, null, null, invalidOrderCriteria, apiToken); + Response 
getVersionFilesResponseInvalidOrderCriteria = UtilIT.getVersionFiles(datasetId, testDatasetVersion, null, null, null, null, null, null, null, invalidOrderCriteria, apiToken); getVersionFilesResponseInvalidOrderCriteria.then().assertThat() .statusCode(BAD_REQUEST.getStatusCode()) .body("message", equalTo("Invalid order criteria: " + invalidOrderCriteria)); + + // Test Content Type + Response getVersionFilesResponseContentType = UtilIT.getVersionFiles(datasetId, testDatasetVersion, null, null, "image/png", null, null, null, null, null, apiToken); + + getVersionFilesResponseContentType.then().assertThat() + .statusCode(OK.getStatusCode()) + .body("data[0].label", equalTo(testFileName5)); + + fileMetadatasCount = getVersionFilesResponseContentType.jsonPath().getList("data").size(); + assertEquals(1, fileMetadatasCount); + + // Test Category Name + String testCategory = "testCategory"; + Response setFileCategoriesResponse = UtilIT.setFileCategories(testFileId1, apiToken, List.of(testCategory)); + setFileCategoriesResponse.then().assertThat().statusCode(OK.getStatusCode()); + setFileCategoriesResponse = UtilIT.setFileCategories(testFileId2, apiToken, List.of(testCategory)); + setFileCategoriesResponse.then().assertThat().statusCode(OK.getStatusCode()); + + Response getVersionFilesResponseCategoryName = UtilIT.getVersionFiles(datasetId, testDatasetVersion, null, null, null, null, testCategory, null, null, null, apiToken); + + getVersionFilesResponseCategoryName.then().assertThat() + .statusCode(OK.getStatusCode()) + .body("data[0].label", equalTo(testFileName1)) + .body("data[1].label", equalTo(testFileName2)); + + fileMetadatasCount = getVersionFilesResponseCategoryName.jsonPath().getList("data").size(); + assertEquals(2, fileMetadatasCount); + + // Test Access Status Restricted + Response restrictFileResponse = UtilIT.restrictFile(String.valueOf(testFileId1), true, apiToken); + restrictFileResponse.then().assertThat() + .statusCode(OK.getStatusCode()); + + Response 
getVersionFilesResponseRestricted = UtilIT.getVersionFiles(datasetId, testDatasetVersion, null, null, null, DatasetVersionFilesServiceBean.DataFileAccessStatus.Restricted.toString(), null, null, null, null, apiToken); + + getVersionFilesResponseRestricted.then().assertThat() + .statusCode(OK.getStatusCode()) + .body("data[0].label", equalTo(testFileName1)); + + fileMetadatasCount = getVersionFilesResponseRestricted.jsonPath().getList("data").size(); + assertEquals(1, fileMetadatasCount); + + // Test Access Status Embargoed + UtilIT.setSetting(SettingsServiceBean.Key.MaxEmbargoDurationInMonths, "12"); + String activeEmbargoDate = LocalDate.now().plusMonths(6).format(DateTimeFormatter.ofPattern("yyyy-MM-dd")); + + // Create embargo for test file 1 (Embargoed and Restricted) + Response createActiveFileEmbargoResponse = UtilIT.createFileEmbargo(datasetId, Integer.parseInt(testFileId1), activeEmbargoDate, apiToken); + + createActiveFileEmbargoResponse.then().assertThat() + .statusCode(OK.getStatusCode()); + + // Create embargo for test file 2 (Embargoed and Public) + createActiveFileEmbargoResponse = UtilIT.createFileEmbargo(datasetId, Integer.parseInt(testFileId2), activeEmbargoDate, apiToken); + + createActiveFileEmbargoResponse.then().assertThat() + .statusCode(OK.getStatusCode()); + + Response getVersionFilesResponseEmbargoedThenPublic = UtilIT.getVersionFiles(datasetId, testDatasetVersion, null, null, null, DatasetVersionFilesServiceBean.DataFileAccessStatus.EmbargoedThenPublic.toString(), null, null, null, null, apiToken); + + getVersionFilesResponseEmbargoedThenPublic.then().assertThat() + .statusCode(OK.getStatusCode()) + .body("data[0].label", equalTo(testFileName2)); + + fileMetadatasCount = getVersionFilesResponseEmbargoedThenPublic.jsonPath().getList("data").size(); + assertEquals(1, fileMetadatasCount); + + Response getVersionFilesResponseEmbargoedThenRestricted = UtilIT.getVersionFiles(datasetId, testDatasetVersion, null, null, null, 
DatasetVersionFilesServiceBean.DataFileAccessStatus.EmbargoedThenRestricted.toString(), null, null, null, null, apiToken); + + getVersionFilesResponseEmbargoedThenRestricted.then().assertThat() + .statusCode(OK.getStatusCode()) + .body("data[0].label", equalTo(testFileName1)); + + fileMetadatasCount = getVersionFilesResponseEmbargoedThenRestricted.jsonPath().getList("data").size(); + assertEquals(1, fileMetadatasCount); + + // Test Access Status Public + Response getVersionFilesResponsePublic = UtilIT.getVersionFiles(datasetId, testDatasetVersion, null, null, null, DatasetVersionFilesServiceBean.DataFileAccessStatus.Public.toString(), null, null, null, null, apiToken); + + getVersionFilesResponsePublic.then().assertThat() + .statusCode(OK.getStatusCode()) + .body("data[0].label", equalTo(testFileName3)) + .body("data[1].label", equalTo(testFileName4)) + .body("data[2].label", equalTo(testFileName5)); + + fileMetadatasCount = getVersionFilesResponsePublic.jsonPath().getList("data").size(); + assertEquals(3, fileMetadatasCount); + + // Test Search Text + Response getVersionFilesResponseSearchText = UtilIT.getVersionFiles(datasetId, testDatasetVersion, null, null, null, null, null, null, "test_1", null, apiToken); + + getVersionFilesResponseSearchText.then().assertThat() + .statusCode(OK.getStatusCode()) + .body("data[0].label", equalTo(testFileName1)); + + fileMetadatasCount = getVersionFilesResponseSearchText.jsonPath().getList("data").size(); + assertEquals(1, fileMetadatasCount); + + // Test Tabular Tag Name + String pathToTabularTestFile = "src/test/resources/tab/test.tab"; + Response uploadTabularFileResponse = UtilIT.uploadFileViaNative(Integer.toString(datasetId), pathToTabularTestFile, Json.createObjectBuilder().build(), apiToken); + uploadTabularFileResponse.then().assertThat().statusCode(OK.getStatusCode()); + + String tabularFileId = uploadTabularFileResponse.getBody().jsonPath().getString("data.files[0].dataFile.id"); + + // Ensure tabular file is 
ingested + sleep(2000); + + String tabularTagName = "Survey"; + Response setFileTabularTagsResponse = UtilIT.setFileTabularTags(tabularFileId, apiToken, List.of(tabularTagName)); + setFileTabularTagsResponse.then().assertThat().statusCode(OK.getStatusCode()); + + Response getVersionFilesResponseTabularTagName = UtilIT.getVersionFiles(datasetId, testDatasetVersion, null, null, null, null, null, tabularTagName, null, null, apiToken); + + getVersionFilesResponseTabularTagName.then().assertThat() + .statusCode(OK.getStatusCode()) + .body("data[0].label", equalTo("test.tab")); + + fileMetadatasCount = getVersionFilesResponseTabularTagName.jsonPath().getList("data").size(); + assertEquals(1, fileMetadatasCount); + } + + @Test + public void getVersionFileCounts() throws IOException { + Response createUser = UtilIT.createRandomUser(); + createUser.then().assertThat().statusCode(OK.getStatusCode()); + String apiToken = UtilIT.getApiTokenFromResponse(createUser); + + Response createDataverseResponse = UtilIT.createRandomDataverse(apiToken); + createDataverseResponse.then().assertThat().statusCode(CREATED.getStatusCode()); + String dataverseAlias = UtilIT.getAliasFromResponse(createDataverseResponse); + + Response createDatasetResponse = UtilIT.createRandomDatasetViaNativeApi(dataverseAlias, apiToken); + createDatasetResponse.then().assertThat().statusCode(CREATED.getStatusCode()); + String datasetPersistentId = JsonPath.from(createDatasetResponse.body().asString()).getString("data.persistentId"); + int datasetId = JsonPath.from(createDatasetResponse.body().asString()).getInt("data.id"); + + // Creating test files + String testFileName1 = "test_1.txt"; + String testFileName2 = "test_2.txt"; + String testFileName3 = "test_3.png"; + + UtilIT.createAndUploadTestFile(datasetPersistentId, testFileName1, new byte[50], apiToken); + UtilIT.createAndUploadTestFile(datasetPersistentId, testFileName2, new byte[200], apiToken); + UtilIT.createAndUploadTestFile(datasetPersistentId, 
testFileName3, new byte[100], apiToken); + + // Creating a categorized test file + String pathToTestFile = "src/test/resources/images/coffeeshop.png"; + Response uploadResponse = UtilIT.uploadFileViaNative(Integer.toString(datasetId), pathToTestFile, Json.createObjectBuilder().build(), apiToken); + uploadResponse.then().assertThat().statusCode(OK.getStatusCode()); + String dataFileId = uploadResponse.getBody().jsonPath().getString("data.files[0].dataFile.id"); + String testCategory = "testCategory"; + Response setFileCategoriesResponse = UtilIT.setFileCategories(dataFileId, apiToken, List.of(testCategory)); + setFileCategoriesResponse.then().assertThat().statusCode(OK.getStatusCode()); + + // Setting embargo for file (Embargo and Public) + UtilIT.setSetting(SettingsServiceBean.Key.MaxEmbargoDurationInMonths, "12"); + String activeEmbargoDate = LocalDate.now().plusMonths(6).format(DateTimeFormatter.ofPattern("yyyy-MM-dd")); + Response createFileEmbargoResponse = UtilIT.createFileEmbargo(datasetId, Integer.parseInt(dataFileId), activeEmbargoDate, apiToken); + createFileEmbargoResponse.then().assertThat().statusCode(OK.getStatusCode()); + + // Getting the file counts and assert each count + Response getVersionFileCountsResponse = UtilIT.getVersionFileCounts(datasetId, ":latest", apiToken); + + getVersionFileCountsResponse.then().assertThat().statusCode(OK.getStatusCode()); + + JsonPath responseJsonPath = getVersionFileCountsResponse.jsonPath(); + LinkedHashMap responseCountPerContentTypeMap = responseJsonPath.get("data.perContentType"); + LinkedHashMap responseCountPerCategoryNameMap = responseJsonPath.get("data.perCategoryName"); + LinkedHashMap responseCountPerAccessStatusMap = responseJsonPath.get("data.perAccessStatus"); + + assertEquals(4, (Integer) responseJsonPath.get("data.total")); + assertEquals(2, responseCountPerContentTypeMap.get("image/png")); + assertEquals(2, responseCountPerContentTypeMap.get("text/plain")); + assertEquals(1, 
responseCountPerCategoryNameMap.get(testCategory)); + assertEquals(3, responseCountPerAccessStatusMap.get(DatasetVersionFilesServiceBean.DataFileAccessStatus.Public.toString())); + assertEquals(1, responseCountPerAccessStatusMap.get(DatasetVersionFilesServiceBean.DataFileAccessStatus.EmbargoedThenPublic.toString())); } } diff --git a/src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java b/src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java index 0a16bca7008..bfe856a8d18 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java @@ -2,6 +2,8 @@ import io.restassured.RestAssured; import io.restassured.response.Response; + +import java.util.List; import java.util.logging.Logger; import edu.harvard.iq.dataverse.api.auth.ApiKeyAuthMechanism; @@ -30,16 +32,12 @@ import static jakarta.ws.rs.core.Response.Status.*; import org.hamcrest.CoreMatchers; -import static org.hamcrest.CoreMatchers.equalTo; -import static org.hamcrest.CoreMatchers.startsWith; -import static org.hamcrest.CoreMatchers.nullValue; import org.hamcrest.Matchers; import org.junit.jupiter.api.AfterAll; -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertNotNull; -import static org.junit.jupiter.api.Assertions.assertNull; -import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.hamcrest.CoreMatchers.*; +import static org.hamcrest.CoreMatchers.hasItem; +import static org.junit.jupiter.api.Assertions.*; public class FilesIT { @@ -2211,4 +2209,137 @@ public void testGetFileDataTables() throws InterruptedException { getFileDataTablesForTabularFileResponse = UtilIT.getFileDataTables(testTabularFileId, randomUserApiToken); getFileDataTablesForTabularFileResponse.then().assertThat().statusCode(FORBIDDEN.getStatusCode()); } + + @Test + public void testSetFileCategories() { + Response createUser = UtilIT.createRandomUser(); + 
createUser.then().assertThat().statusCode(OK.getStatusCode()); + String apiToken = UtilIT.getApiTokenFromResponse(createUser); + + Response createDataverseResponse = UtilIT.createRandomDataverse(apiToken); + createDataverseResponse.then().assertThat().statusCode(CREATED.getStatusCode()); + String dataverseAlias = UtilIT.getAliasFromResponse(createDataverseResponse); + + Response createDatasetResponse = UtilIT.createRandomDatasetViaNativeApi(dataverseAlias, apiToken); + createDatasetResponse.then().assertThat().statusCode(CREATED.getStatusCode()); + int datasetId = JsonPath.from(createDatasetResponse.body().asString()).getInt("data.id"); + + // Upload test file + String pathToTestFile = "src/test/resources/images/coffeeshop.png"; + Response uploadResponse = UtilIT.uploadFileViaNative(Integer.toString(datasetId), pathToTestFile, Json.createObjectBuilder().build(), apiToken); + uploadResponse.then().assertThat().statusCode(OK.getStatusCode()); + + String dataFileId = uploadResponse.getBody().jsonPath().getString("data.files[0].dataFile.id"); + + // Set categories + String testCategory1 = "testCategory1"; + String testCategory2 = "testCategory2"; + List testCategories = List.of(testCategory1, testCategory2); + Response setFileCategoriesResponse = UtilIT.setFileCategories(dataFileId, apiToken, testCategories); + setFileCategoriesResponse.then().assertThat().statusCode(OK.getStatusCode()); + + // Get file data and check for new categories + Response getFileDataResponse = UtilIT.getFileData(dataFileId, apiToken); + getFileDataResponse.prettyPrint(); + getFileDataResponse.then().assertThat() + .body("data.categories", hasItem(testCategory1)) + .body("data.categories", hasItem(testCategory2)) + .statusCode(OK.getStatusCode()); + } + + @Test + public void testSetFileTabularTags() throws InterruptedException { + Response createUser = UtilIT.createRandomUser(); + createUser.then().assertThat().statusCode(OK.getStatusCode()); + String apiToken = 
UtilIT.getApiTokenFromResponse(createUser); + + Response createDataverseResponse = UtilIT.createRandomDataverse(apiToken); + createDataverseResponse.then().assertThat().statusCode(CREATED.getStatusCode()); + String dataverseAlias = UtilIT.getAliasFromResponse(createDataverseResponse); + + Response createDatasetResponse = UtilIT.createRandomDatasetViaNativeApi(dataverseAlias, apiToken); + createDatasetResponse.then().assertThat().statusCode(CREATED.getStatusCode()); + int datasetId = JsonPath.from(createDatasetResponse.body().asString()).getInt("data.id"); + + // Upload tabular file + String pathToTabularTestFile = "src/test/resources/tab/test.tab"; + Response uploadTabularFileResponse = UtilIT.uploadFileViaNative(Integer.toString(datasetId), pathToTabularTestFile, Json.createObjectBuilder().build(), apiToken); + uploadTabularFileResponse.then().assertThat().statusCode(OK.getStatusCode()); + + String tabularFileId = uploadTabularFileResponse.getBody().jsonPath().getString("data.files[0].dataFile.id"); + + // Ensure tabular file is ingested + sleep(2000); + + // Set tabular tags + String testTabularTag1 = "Survey"; + String testTabularTag2 = "Genomics"; + List testTabularTags = List.of(testTabularTag1, testTabularTag2); + Response setFileTabularTagsResponse = UtilIT.setFileTabularTags(tabularFileId, apiToken, testTabularTags); + setFileTabularTagsResponse.then().assertThat().statusCode(OK.getStatusCode()); + + // Get file data and check for new categories + Response getFileDataResponse = UtilIT.getFileData(tabularFileId, apiToken); + getFileDataResponse.then().assertThat() + .body("data.dataFile.tabularTags", hasItem(testTabularTag1)) + .body("data.dataFile.tabularTags", hasItem(testTabularTag2)) + .statusCode(OK.getStatusCode()); + + // Set invalid tabular tag + String testInvalidTabularTag = "Invalid"; + setFileTabularTagsResponse = UtilIT.setFileTabularTags(tabularFileId, apiToken, List.of(testInvalidTabularTag)); + 
setFileTabularTagsResponse.then().assertThat().statusCode(BAD_REQUEST.getStatusCode()); + + // Get file data and check categories are unaltered + getFileDataResponse = UtilIT.getFileData(tabularFileId, apiToken); + getFileDataResponse.then().assertThat() + .body("data.dataFile.tabularTags", hasItem(testTabularTag1)) + .body("data.dataFile.tabularTags", hasItem(testTabularTag2)) + .statusCode(OK.getStatusCode()); + } + + @Test + public void testGetHasBeenDeleted() { + Response createUser = UtilIT.createRandomUser(); + createUser.then().assertThat().statusCode(OK.getStatusCode()); + String apiToken = UtilIT.getApiTokenFromResponse(createUser); + + Response createDataverseResponse = UtilIT.createRandomDataverse(apiToken); + createDataverseResponse.then().assertThat().statusCode(CREATED.getStatusCode()); + String dataverseAlias = UtilIT.getAliasFromResponse(createDataverseResponse); + + Response createDatasetResponse = UtilIT.createRandomDatasetViaNativeApi(dataverseAlias, apiToken); + createDatasetResponse.then().assertThat().statusCode(CREATED.getStatusCode()); + int datasetId = JsonPath.from(createDatasetResponse.body().asString()).getInt("data.id"); + + // Upload test file + String pathToTestFile = "src/test/resources/images/coffeeshop.png"; + Response uploadResponse = UtilIT.uploadFileViaNative(Integer.toString(datasetId), pathToTestFile, Json.createObjectBuilder().build(), apiToken); + uploadResponse.then().assertThat().statusCode(OK.getStatusCode()); + + String dataFileId = uploadResponse.getBody().jsonPath().getString("data.files[0].dataFile.id"); + + // Publish dataverse and dataset + Response publishDataverseResponse = UtilIT.publishDataverseViaNativeApi(dataverseAlias, apiToken); + publishDataverseResponse.then().assertThat().statusCode(OK.getStatusCode()); + + Response publishDatasetResponse = UtilIT.publishDatasetViaNativeApi(datasetId, "major", apiToken); + publishDatasetResponse.then().assertThat().statusCode(OK.getStatusCode()); + + // Assert that the 
file has not been deleted + Response getHasBeenDeletedResponse = UtilIT.getHasBeenDeleted(dataFileId, apiToken); + getHasBeenDeletedResponse.then().assertThat().statusCode(OK.getStatusCode()); + boolean fileHasBeenDeleted = JsonPath.from(getHasBeenDeletedResponse.body().asString()).getBoolean("data"); + assertFalse(fileHasBeenDeleted); + + // Delete test file + Response deleteFileInDatasetResponse = UtilIT.deleteFileInDataset(Integer.parseInt(dataFileId), apiToken); + deleteFileInDatasetResponse.then().assertThat().statusCode(OK.getStatusCode()); + + // Assert that the file has been deleted + getHasBeenDeletedResponse = UtilIT.getHasBeenDeleted(dataFileId, apiToken); + getHasBeenDeletedResponse.then().assertThat().statusCode(OK.getStatusCode()); + fileHasBeenDeleted = JsonPath.from(getHasBeenDeletedResponse.body().asString()).getBoolean("data"); + assertTrue(fileHasBeenDeleted); + } } diff --git a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java index f61b392c898..8e333451c8d 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java @@ -3276,7 +3276,7 @@ static Response getDatasetVersionCitation(Integer datasetId, String version, Str return response; } - static Response getVersionFiles(Integer datasetId, String version, Integer limit, Integer offset, String orderCriteria, String apiToken) { + static Response getVersionFiles(Integer datasetId, String version, Integer limit, Integer offset, String contentType, String accessStatus, String categoryName, String tabularTagName, String searchText, String orderCriteria, String apiToken) { RequestSpecification requestSpecification = given() .header(API_TOKEN_HTTP_HEADER, apiToken) .contentType("application/json"); @@ -3286,6 +3286,21 @@ static Response getVersionFiles(Integer datasetId, String version, Integer limit if (offset != null) { requestSpecification = 
requestSpecification.queryParam("offset", offset); } + if (contentType != null) { + requestSpecification = requestSpecification.queryParam("contentType", contentType); + } + if (accessStatus != null) { + requestSpecification = requestSpecification.queryParam("accessStatus", accessStatus); + } + if (categoryName != null) { + requestSpecification = requestSpecification.queryParam("categoryName", categoryName); + } + if (tabularTagName != null) { + requestSpecification = requestSpecification.queryParam("tabularTagName", tabularTagName); + } + if (searchText != null) { + requestSpecification = requestSpecification.queryParam("searchText", searchText); + } if (orderCriteria != null) { requestSpecification = requestSpecification.queryParam("orderCriteria", orderCriteria); } @@ -3317,9 +3332,75 @@ static Response getFileDataTables(String dataFileId, String apiToken) { .get("/api/files/" + dataFileId + "/dataTables"); } + static Response getUserFileAccessRequested(String dataFileId, String apiToken) { + return given() + .header(API_TOKEN_HTTP_HEADER, apiToken) + .get("/api/access/datafile/" + dataFileId + "/userFileAccessRequested"); + } + static Response getUserPermissionsOnFile(String dataFileId, String apiToken) { return given() .header(API_TOKEN_HTTP_HEADER, apiToken) .get("/api/access/datafile/" + dataFileId + "/userPermissions"); } + + static Response createFileEmbargo(Integer datasetId, Integer fileId, String dateAvailable, String apiToken) { + JsonObjectBuilder jsonBuilder = Json.createObjectBuilder(); + jsonBuilder.add("dateAvailable", dateAvailable); + jsonBuilder.add("reason", "This is a test embargo"); + jsonBuilder.add("fileIds", Json.createArrayBuilder().add(fileId)); + String jsonString = jsonBuilder.build().toString(); + return given() + .header(API_TOKEN_HTTP_HEADER, apiToken) + .body(jsonString) + .contentType("application/json") + .urlEncodingEnabled(false) + .post("/api/datasets/" + datasetId + "/files/actions/:set-embargo"); + } + + static Response 
getVersionFileCounts(Integer datasetId, String version, String apiToken) { + return given() + .header(API_TOKEN_HTTP_HEADER, apiToken) + .get("/api/datasets/" + datasetId + "/versions/" + version + "/files/counts"); + } + + static Response setFileCategories(String dataFileId, String apiToken, List categories) { + JsonArrayBuilder jsonArrayBuilder = Json.createArrayBuilder(); + for (String category : categories) { + jsonArrayBuilder.add(category); + } + JsonObjectBuilder jsonObjectBuilder = Json.createObjectBuilder(); + jsonObjectBuilder.add("categories", jsonArrayBuilder); + String jsonString = jsonObjectBuilder.build().toString(); + return given() + .header(API_TOKEN_HTTP_HEADER, apiToken) + .body(jsonString) + .post("/api/files/" + dataFileId + "/metadata/categories"); + } + + static Response setFileTabularTags(String dataFileId, String apiToken, List tabularTags) { + JsonArrayBuilder jsonArrayBuilder = Json.createArrayBuilder(); + for (String tabularTag : tabularTags) { + jsonArrayBuilder.add(tabularTag); + } + JsonObjectBuilder jsonObjectBuilder = Json.createObjectBuilder(); + jsonObjectBuilder.add("tabularTags", jsonArrayBuilder); + String jsonString = jsonObjectBuilder.build().toString(); + return given() + .header(API_TOKEN_HTTP_HEADER, apiToken) + .body(jsonString) + .post("/api/files/" + dataFileId + "/metadata/tabularTags"); + } + + static Response deleteFileInDataset(Integer fileId, String apiToken) { + return given() + .header(API_TOKEN_HTTP_HEADER, apiToken) + .delete("/api/files/" + fileId); + } + + static Response getHasBeenDeleted(String dataFileId, String apiToken) { + return given() + .header(API_TOKEN_HTTP_HEADER, apiToken) + .get("/api/files/" + dataFileId + "/hasBeenDeleted"); + } } From 440d02030d12852d61a38c458d6d21d1f9f92bfd Mon Sep 17 00:00:00 2001 From: GPortas Date: Wed, 4 Oct 2023 16:40:29 +0100 Subject: [PATCH 363/396] Added: release notes for PR #9972 --- doc/release-notes/9972-files-api-filter-by-tabular-tags.md | 3 +++ 1 file 
changed, 3 insertions(+) create mode 100644 doc/release-notes/9972-files-api-filter-by-tabular-tags.md diff --git a/doc/release-notes/9972-files-api-filter-by-tabular-tags.md b/doc/release-notes/9972-files-api-filter-by-tabular-tags.md new file mode 100644 index 00000000000..9c3fced1741 --- /dev/null +++ b/doc/release-notes/9972-files-api-filter-by-tabular-tags.md @@ -0,0 +1,3 @@ +- New query parameter `tabularTagName` added to the getVersionFiles endpoint (/api/datasets/{id}/versions/{versionId}/files) to return files to which the particular tabular tag has been added. + +- New endpoint to set tabular file tags via API: /api/files/{id}/metadata/tabularTags. From eb56c502dc2179754fa144bf0f354da444612ea9 Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Tue, 26 Sep 2023 16:26:21 -0400 Subject: [PATCH 364/396] allow fast xhtml edits in Docker #9590 --- docker-compose-dev.yml | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/docker-compose-dev.yml b/docker-compose-dev.yml index 694f2046ca8..930bb1230f5 100644 --- a/docker-compose-dev.yml +++ b/docker-compose-dev.yml @@ -18,6 +18,7 @@ services: - DATAVERSE_AUTH_OIDC_CLIENT_ID=test - DATAVERSE_AUTH_OIDC_CLIENT_SECRET=94XHrfNRwXsjqTqApRrwWmhDLDHpIYV8 - DATAVERSE_AUTH_OIDC_AUTH_SERVER_URL=http://keycloak.mydomain.com:8090/realms/test + - DATAVERSE_JSF_REFRESH_PERIOD=1 ports: - "8080:8080" # HTTP (Dataverse Application) - "4848:4848" # HTTP (Payara Admin Console) @@ -31,6 +32,9 @@ services: volumes: - ./docker-dev-volumes/app/data:/dv - ./docker-dev-volumes/app/secrets:/secrets + # Uncomment for changes to xhtml in Netbeans to be deployed immediately. + # Replace 6.0 with the current version. 
+ # - ./target/dataverse-6.0:/opt/payara/deployments/dataverse tmpfs: - /dumps:mode=770,size=2052M,uid=1000,gid=1000 - /tmp:mode=770,size=2052M,uid=1000,gid=1000 From db1410e8c14c73becf7042b4d5ebca6c0c9ef8bc Mon Sep 17 00:00:00 2001 From: GPortas Date: Wed, 4 Oct 2023 17:15:01 +0100 Subject: [PATCH 365/396] Added: non-tabular error test case to setFileTabularTags IT --- .../java/edu/harvard/iq/dataverse/api/FilesIT.java | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java b/src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java index af678905fe1..af020215f5d 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java @@ -2304,6 +2304,16 @@ public void testSetFileTabularTags() throws InterruptedException { actualTabularTagsCount = getFileDataResponse.jsonPath().getList("data.dataFile.tabularTags").size(); assertEquals(2, actualTabularTagsCount); + + // Should receive an error when calling the endpoint for a non-tabular file + String pathToTestFile = "src/test/resources/images/coffeeshop.png"; + Response uploadResponse = UtilIT.uploadFileViaNative(Integer.toString(datasetId), pathToTestFile, Json.createObjectBuilder().build(), apiToken); + uploadResponse.then().assertThat().statusCode(OK.getStatusCode()); + + String nonTabularFileId = uploadResponse.getBody().jsonPath().getString("data.files[0].dataFile.id"); + + setFileTabularTagsResponse = UtilIT.setFileTabularTags(nonTabularFileId, apiToken, List.of(testInvalidTabularTag)); + setFileTabularTagsResponse.then().assertThat().statusCode(BAD_REQUEST.getStatusCode()); } @Test From e2fa09680472fdcda617efb430d0b14ccce70ab9 Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Wed, 4 Oct 2023 12:15:18 -0400 Subject: [PATCH 366/396] instead of Netbeans, be generic about IDEs/tools #9590 --- docker-compose-dev.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git 
a/docker-compose-dev.yml b/docker-compose-dev.yml index 930bb1230f5..d4564ab1335 100644 --- a/docker-compose-dev.yml +++ b/docker-compose-dev.yml @@ -32,7 +32,7 @@ services: volumes: - ./docker-dev-volumes/app/data:/dv - ./docker-dev-volumes/app/secrets:/secrets - # Uncomment for changes to xhtml in Netbeans to be deployed immediately. + # Uncomment for changes to xhtml to be deployed immediately (if supported your IDE or toolchain). # Replace 6.0 with the current version. # - ./target/dataverse-6.0:/opt/payara/deployments/dataverse tmpfs: From 9d7108b7ed2759dade7f6d1b67a1ec1ab541cc95 Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Wed, 4 Oct 2023 23:59:11 +0200 Subject: [PATCH 367/396] chore(build): use stable Rewrite releases for Jakarta EE 10 #8305 --- modules/dataverse-parent/pom.xml | 3 ++- pom.xml | 4 ++-- 2 files changed, 4 insertions(+), 3 deletions(-) diff --git a/modules/dataverse-parent/pom.xml b/modules/dataverse-parent/pom.xml index bfa11af6c70..b6d846b49bc 100644 --- a/modules/dataverse-parent/pom.xml +++ b/modules/dataverse-parent/pom.xml @@ -426,6 +426,7 @@ https://artifacts.unidata.ucar.edu/repository/unidata-all/ + + --> diff --git a/pom.xml b/pom.xml index e70b723cad5..5536bcccb05 100644 --- a/pom.xml +++ b/pom.xml @@ -359,12 +359,12 @@ org.ocpsoft.rewrite rewrite-servlet - 6.0.0-SNAPSHOT + 10.0.0.Final org.ocpsoft.rewrite rewrite-config-prettyfaces - 6.0.0-SNAPSHOT + 10.0.0.Final edu.ucsb.nceas From 365b18cf2ba70d2ac192a04475ede920eb11aa6c Mon Sep 17 00:00:00 2001 From: Lehebax Date: Thu, 5 Oct 2023 12:07:41 +0530 Subject: [PATCH 368/396] Fixed the equals() method check --- .../java/edu/harvard/iq/dataverse/DatasetFieldDefaultValue.java | 2 +- src/main/java/edu/harvard/iq/dataverse/DataverseContact.java | 2 +- src/main/java/edu/harvard/iq/dataverse/DataverseFacet.java | 2 +- .../edu/harvard/iq/dataverse/DataverseFeaturedDataverse.java | 2 +- src/main/java/edu/harvard/iq/dataverse/DataverseTheme.java | 2 +- 5 files changed, 5 insertions(+), 5 
deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetFieldDefaultValue.java b/src/main/java/edu/harvard/iq/dataverse/DatasetFieldDefaultValue.java index 7746099818e..8ac98500890 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetFieldDefaultValue.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetFieldDefaultValue.java @@ -113,7 +113,7 @@ public int hashCode() { @Override public boolean equals(Object object) { - if (!(object instanceof DatasetField)) { + if (!(object instanceof DatasetFieldDefaultValue)) { return false; } DatasetFieldDefaultValue other = (DatasetFieldDefaultValue) object; diff --git a/src/main/java/edu/harvard/iq/dataverse/DataverseContact.java b/src/main/java/edu/harvard/iq/dataverse/DataverseContact.java index d77767985eb..9f86a03639a 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DataverseContact.java +++ b/src/main/java/edu/harvard/iq/dataverse/DataverseContact.java @@ -99,7 +99,7 @@ public int hashCode() { @Override public boolean equals(Object object) { - if (!(object instanceof DatasetFieldType)) { + if (!(object instanceof DataverseContact)) { return false; } DataverseContact other = (DataverseContact) object; diff --git a/src/main/java/edu/harvard/iq/dataverse/DataverseFacet.java b/src/main/java/edu/harvard/iq/dataverse/DataverseFacet.java index 768c2308e50..83a2d8fdb8f 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DataverseFacet.java +++ b/src/main/java/edu/harvard/iq/dataverse/DataverseFacet.java @@ -93,7 +93,7 @@ public int hashCode() { @Override public boolean equals(Object object) { - if (!(object instanceof DatasetFieldType)) { + if (!(object instanceof DataverseFacet)) { return false; } DataverseFacet other = (DataverseFacet) object; diff --git a/src/main/java/edu/harvard/iq/dataverse/DataverseFeaturedDataverse.java b/src/main/java/edu/harvard/iq/dataverse/DataverseFeaturedDataverse.java index 39ad6ca9520..d30d94cd034 100644 --- 
a/src/main/java/edu/harvard/iq/dataverse/DataverseFeaturedDataverse.java +++ b/src/main/java/edu/harvard/iq/dataverse/DataverseFeaturedDataverse.java @@ -85,7 +85,7 @@ public int hashCode() { @Override public boolean equals(Object object) { - if (!(object instanceof DatasetFieldType)) { + if (!(object instanceof DataverseFeaturedDataverse)) { return false; } DataverseFeaturedDataverse other = (DataverseFeaturedDataverse) object; diff --git a/src/main/java/edu/harvard/iq/dataverse/DataverseTheme.java b/src/main/java/edu/harvard/iq/dataverse/DataverseTheme.java index 539669328a7..7f57d16b95a 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DataverseTheme.java +++ b/src/main/java/edu/harvard/iq/dataverse/DataverseTheme.java @@ -181,7 +181,7 @@ public int hashCode() { @Override public boolean equals(Object object) { - if (!(object instanceof DatasetFieldType)) { + if (!(object instanceof DataverseTheme)) { return false; } DataverseTheme other = (DataverseTheme) object; From f42587e1e33af7170185cc24ce382cb402d72533 Mon Sep 17 00:00:00 2001 From: Lehebax Date: Thu, 5 Oct 2023 19:18:37 +0530 Subject: [PATCH 369/396] Added unit tests for the fixed equals() methods --- .../DatasetFieldDefaultValueTest.java | 47 +++++++++++++++++++ .../iq/dataverse/DataverseContactTest.java | 47 +++++++++++++++++++ .../iq/dataverse/DataverseFacetTest.java | 47 +++++++++++++++++++ .../DataverseFeaturedDataverseTest.java | 47 +++++++++++++++++++ .../iq/dataverse/DataverseThemeTest.java | 47 +++++++++++++++++++ 5 files changed, 235 insertions(+) create mode 100644 src/test/java/edu/harvard/iq/dataverse/DatasetFieldDefaultValueTest.java create mode 100644 src/test/java/edu/harvard/iq/dataverse/DataverseContactTest.java create mode 100644 src/test/java/edu/harvard/iq/dataverse/DataverseFacetTest.java create mode 100644 src/test/java/edu/harvard/iq/dataverse/DataverseFeaturedDataverseTest.java create mode 100644 src/test/java/edu/harvard/iq/dataverse/DataverseThemeTest.java diff --git 
a/src/test/java/edu/harvard/iq/dataverse/DatasetFieldDefaultValueTest.java b/src/test/java/edu/harvard/iq/dataverse/DatasetFieldDefaultValueTest.java new file mode 100644 index 00000000000..999fadaae06 --- /dev/null +++ b/src/test/java/edu/harvard/iq/dataverse/DatasetFieldDefaultValueTest.java @@ -0,0 +1,47 @@ +package edu.harvard.iq.dataverse; + +import org.junit.jupiter.api.Test; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.assertFalse; + +import org.junit.jupiter.api.BeforeEach; + +public class DatasetFieldDefaultValueTest { + private DatasetFieldDefaultValue dataverseContact; + + @BeforeEach + public void before() { + this.dataverseContact = new DatasetFieldDefaultValue(); + this.dataverseContact.setId(1L); + } + + @Test + public void testEqualsWithNull() { + assertFalse(this.dataverseContact.equals(null)); + } + + @Test + public void testEqualsWithDifferentClass() { + DatasetField datasetField = new DatasetField(); + + assertFalse(this.dataverseContact.equals(datasetField)); + } + + @Test + public void testEqualsWithSameClassSameId() { + DatasetFieldDefaultValue dataverseContact1 = new DatasetFieldDefaultValue(); + dataverseContact1.setId(1L); + + assertTrue(this.dataverseContact.equals(dataverseContact1)); + } + + @Test + public void testEqualsWithSameClassDifferentId() { + DatasetFieldDefaultValue dataverseContact1 = new DatasetFieldDefaultValue(); + dataverseContact1.setId(2L); + + assertFalse(this.dataverseContact.equals(dataverseContact1)); + } +} \ No newline at end of file diff --git a/src/test/java/edu/harvard/iq/dataverse/DataverseContactTest.java b/src/test/java/edu/harvard/iq/dataverse/DataverseContactTest.java new file mode 100644 index 00000000000..2abb10a485d --- /dev/null +++ b/src/test/java/edu/harvard/iq/dataverse/DataverseContactTest.java @@ -0,0 +1,47 @@ +package edu.harvard.iq.dataverse; + +import 
org.junit.jupiter.api.Test; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.assertFalse; + +import org.junit.jupiter.api.BeforeEach; + +public class DataverseContactTest { + private DataverseContact dataverseContact; + + @BeforeEach + public void before() { + this.dataverseContact = new DataverseContact(); + this.dataverseContact.setId(1L); + } + + @Test + public void testEqualsWithNull() { + assertFalse(this.dataverseContact.equals(null)); + } + + @Test + public void testEqualsWithDifferentClass() { + DatasetFieldType datasetFieldType = new DatasetFieldType(); + + assertFalse(this.dataverseContact.equals(datasetFieldType)); + } + + @Test + public void testEqualsWithSameClassSameId() { + DataverseContact dataverseContact1 = new DataverseContact(); + dataverseContact1.setId(1L); + + assertTrue(this.dataverseContact.equals(dataverseContact1)); + } + + @Test + public void testEqualsWithSameClassDifferentId() { + DataverseContact dataverseContact1 = new DataverseContact(); + dataverseContact1.setId(2L); + + assertFalse(this.dataverseContact.equals(dataverseContact1)); + } +} \ No newline at end of file diff --git a/src/test/java/edu/harvard/iq/dataverse/DataverseFacetTest.java b/src/test/java/edu/harvard/iq/dataverse/DataverseFacetTest.java new file mode 100644 index 00000000000..7ae50439c10 --- /dev/null +++ b/src/test/java/edu/harvard/iq/dataverse/DataverseFacetTest.java @@ -0,0 +1,47 @@ +package edu.harvard.iq.dataverse; + +import org.junit.jupiter.api.Test; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.assertFalse; + +import org.junit.jupiter.api.BeforeEach; + +public class DataverseFacetTest { + private DataverseFacet dataverseFacet; + + @BeforeEach + public void before() { + this.dataverseFacet = new DataverseFacet(); 
+ this.dataverseFacet.setId(1L); + } + + @Test + public void testEqualsWithNull() { + assertFalse(this.dataverseFacet.equals(null)); + } + + @Test + public void testEqualsWithDifferentClass() { + DatasetFieldType datasetFieldType = new DatasetFieldType(); + + assertFalse(this.dataverseFacet.equals(datasetFieldType)); + } + + @Test + public void testEqualsWithSameClassSameId() { + DataverseFacet dataverseFacet1 = new DataverseFacet(); + dataverseFacet1.setId(1L); + + assertTrue(this.dataverseFacet.equals(dataverseFacet1)); + } + + @Test + public void testEqualsWithSameClassDifferentId() { + DataverseFacet dataverseFacet1 = new DataverseFacet(); + dataverseFacet1.setId(2L); + + assertFalse(this.dataverseFacet.equals(dataverseFacet1)); + } +} \ No newline at end of file diff --git a/src/test/java/edu/harvard/iq/dataverse/DataverseFeaturedDataverseTest.java b/src/test/java/edu/harvard/iq/dataverse/DataverseFeaturedDataverseTest.java new file mode 100644 index 00000000000..b024dc3bfd3 --- /dev/null +++ b/src/test/java/edu/harvard/iq/dataverse/DataverseFeaturedDataverseTest.java @@ -0,0 +1,47 @@ +package edu.harvard.iq.dataverse; + +import org.junit.jupiter.api.Test; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.assertFalse; + +import org.junit.jupiter.api.BeforeEach; + +public class DataverseFeaturedDataverseTest { + private DataverseFeaturedDataverse dataverseFeaturedDataverse; + + @BeforeEach + public void before() { + this.dataverseFeaturedDataverse = new DataverseFeaturedDataverse(); + this.dataverseFeaturedDataverse.setId(1L); + } + + @Test + public void testEqualsWithNull() { + assertFalse(this.dataverseFeaturedDataverse.equals(null)); + } + + @Test + public void testEqualsWithDifferentClass() { + DatasetFieldType datasetFieldType = new DatasetFieldType(); + + assertFalse(this.dataverseFeaturedDataverse.equals(datasetFieldType)); + } + + 
@Test + public void testEqualsWithSameClassSameId() { + DataverseFeaturedDataverse dataverseFeaturedDataverse1 = new DataverseFeaturedDataverse(); + dataverseFeaturedDataverse1.setId(1L); + + assertTrue(this.dataverseFeaturedDataverse.equals(dataverseFeaturedDataverse1)); + } + + @Test + public void testEqualsWithSameClassDifferentId() { + DataverseFeaturedDataverse dataverseFeaturedDataverse1 = new DataverseFeaturedDataverse(); + dataverseFeaturedDataverse1.setId(2L); + + assertFalse(this.dataverseFeaturedDataverse.equals(dataverseFeaturedDataverse1)); + } +} \ No newline at end of file diff --git a/src/test/java/edu/harvard/iq/dataverse/DataverseThemeTest.java b/src/test/java/edu/harvard/iq/dataverse/DataverseThemeTest.java new file mode 100644 index 00000000000..e6721e34c6f --- /dev/null +++ b/src/test/java/edu/harvard/iq/dataverse/DataverseThemeTest.java @@ -0,0 +1,47 @@ +package edu.harvard.iq.dataverse; + +import org.junit.jupiter.api.Test; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.assertFalse; + +import org.junit.jupiter.api.BeforeEach; + +public class DataverseThemeTest { + private DataverseTheme dataverseTheme; + + @BeforeEach + public void before() { + this.dataverseTheme = new DataverseTheme(); + this.dataverseTheme.setId(1L); + } + + @Test + public void testEqualsWithNull() { + assertFalse(this.dataverseTheme.equals(null)); + } + + @Test + public void testEqualsWithDifferentClass() { + DatasetFieldType datasetFieldType = new DatasetFieldType(); + + assertFalse(this.dataverseTheme.equals(datasetFieldType)); + } + + @Test + public void testEqualsWithSameClassSameId() { + DataverseTheme dataverseTheme1 = new DataverseTheme(); + dataverseTheme1.setId(1L); + + assertTrue(this.dataverseTheme.equals(dataverseTheme1)); + } + + @Test + public void testEqualsWithSameClassDifferentId() { + DataverseTheme dataverseTheme1 = new 
DataverseTheme(); + dataverseTheme1.setId(2L); + + assertFalse(this.dataverseTheme.equals(dataverseTheme1)); + } +} \ No newline at end of file From faa8de1639b007a8c2a9f90b73a723fc10e7f19a Mon Sep 17 00:00:00 2001 From: GPortas Date: Fri, 6 Oct 2023 10:31:28 +0100 Subject: [PATCH 370/396] Changed: using json files in curl examples for tabular tags and categories update in the docs --- doc/sphinx-guides/source/api/native-api.rst | 54 +++++++++++++++------ 1 file changed, 38 insertions(+), 16 deletions(-) diff --git a/doc/sphinx-guides/source/api/native-api.rst b/doc/sphinx-guides/source/api/native-api.rst index f78ae62f3ae..97b41ffa98a 100644 --- a/doc/sphinx-guides/source/api/native-api.rst +++ b/doc/sphinx-guides/source/api/native-api.rst @@ -3026,6 +3026,15 @@ Updates the categories for an existing file where ``ID`` is the database id of t Although updating categories can also be done with the previous endpoint, this has been created to be more practical when it is only necessary to update categories and not other metadata fields. +The JSON representation of file categories (``categories.json``) looks like this:: + + { + "categories": [ + "Data", + "Custom" + ] + } + A curl example using an ``ID`` .. code-block:: bash @@ -3033,18 +3042,19 @@ A curl example using an ``ID`` export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx export SERVER_URL=https://demo.dataverse.org export ID=24 + export FILE_PATH=categories.json curl -H "X-Dataverse-key:$API_TOKEN" -X POST \ - -F 'jsonData={"categories":["Category1","Category2"]}' \ - "$SERVER_URL/api/files/$ID/metadata/categories" + "$SERVER_URL/api/files/$ID/metadata/categories" \ + -H "Content-type:application/json" --upload-file $FILE_PATH The fully expanded example above (without environment variables) looks like this: .. 
code-block:: bash curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X POST \ - -F 'jsonData={"categories":["Category1","Category2"]}' \ - "http://demo.dataverse.org/api/files/24/metadata/categories" + "http://demo.dataverse.org/api/files/24/metadata/categories" \ + -H "Content-type:application/json" --upload-file categories.json A curl example using a ``PERSISTENT_ID`` @@ -3053,18 +3063,19 @@ A curl example using a ``PERSISTENT_ID`` export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx export SERVER_URL=https://demo.dataverse.org export PERSISTENT_ID=doi:10.5072/FK2/AAA000 + export FILE_PATH=categories.json curl -H "X-Dataverse-key:$API_TOKEN" -X POST \ - -F 'jsonData={"categories":["Category1","Category2"]}' \ - "$SERVER_URL/api/files/:persistentId/metadata/categories?persistentId=$PERSISTENT_ID" + "$SERVER_URL/api/files/:persistentId/metadata/categories?persistentId=$PERSISTENT_ID" \ + -H "Content-type:application/json" --upload-file $FILE_PATH The fully expanded example above (without environment variables) looks like this: .. code-block:: bash curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X POST \ - -F 'jsonData={"categories":["Category1","Category2"]}' \ - "https://demo.dataverse.org/api/files/:persistentId/metadata/categories?persistentId=doi:10.5072/FK2/AAA000" + "https://demo.dataverse.org/api/files/:persistentId/metadata/categories?persistentId=doi:10.5072/FK2/AAA000" \ + -H "Content-type:application/json" --upload-file categories.json Note that if the specified categories do not exist, they will be created. @@ -3073,6 +3084,15 @@ Updating File Tabular Tags Updates the tabular tags for an existing tabular file where ``ID`` is the database id of the file to update or ``PERSISTENT_ID`` is the persistent id (DOI or Handle) of the file. Requires a ``jsonString`` expressing the tabular tag names. 
+The JSON representation of tabular tags (``tags.json``) looks like this:: + + { + "tabularTags": [ + "Survey", + "Genomics" + ] + } + A curl example using an ``ID`` .. code-block:: bash @@ -3080,18 +3100,19 @@ A curl example using an ``ID`` export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx export SERVER_URL=https://demo.dataverse.org export ID=24 + export FILE_PATH=tags.json curl -H "X-Dataverse-key:$API_TOKEN" -X POST \ - -F 'jsonData={"tabularTags":["Survey","Genomics"]}' \ - "$SERVER_URL/api/files/$ID/metadata/tabularTags" + "$SERVER_URL/api/files/$ID/metadata/tabularTags" \ + -H "Content-type:application/json" --upload-file $FILE_PATH The fully expanded example above (without environment variables) looks like this: .. code-block:: bash curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X POST \ - -F 'jsonData={"tabularTags":["Survey","Genomics"]}' \ - "http://demo.dataverse.org/api/files/24/metadata/tabularTags" + "http://demo.dataverse.org/api/files/24/metadata/tabularTags" \ + -H "Content-type:application/json" --upload-file tags.json A curl example using a ``PERSISTENT_ID`` @@ -3100,18 +3121,19 @@ A curl example using a ``PERSISTENT_ID`` export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx export SERVER_URL=https://demo.dataverse.org export PERSISTENT_ID=doi:10.5072/FK2/AAA000 + export FILE_PATH=tags.json curl -H "X-Dataverse-key:$API_TOKEN" -X POST \ - -F 'jsonData={"tabularTags":["Survey","Genomics"]}' \ - "$SERVER_URL/api/files/:persistentId/metadata/tabularTags?persistentId=$PERSISTENT_ID" + "$SERVER_URL/api/files/:persistentId/metadata/tabularTags?persistentId=$PERSISTENT_ID" \ + -H "Content-type:application/json" --upload-file $FILE_PATH The fully expanded example above (without environment variables) looks like this: .. 
code-block:: bash curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X POST \ - -F 'jsonData={"tabularTags":["Survey","Genomics"]}' \ - "https://demo.dataverse.org/api/files/:persistentId/metadata/tabularTags?persistentId=doi:10.5072/FK2/AAA000" + "https://demo.dataverse.org/api/files/:persistentId/metadata/tabularTags?persistentId=doi:10.5072/FK2/AAA000" \ + -H "Content-type:application/json" --upload-file tags.json Note that the specified tabular tags must be valid. The supported tags are: From 1440e653b8480c754f0669bb15f1b2cd92442522 Mon Sep 17 00:00:00 2001 From: GPortas Date: Fri, 6 Oct 2023 14:48:30 +0100 Subject: [PATCH 371/396] Refactor: FileSearchCriteria to encapsulate all criteria options --- .../DatasetVersionFilesServiceBean.java | 54 +++++++++---------- .../iq/dataverse/FileSearchCriteria.java | 45 ++++++++++++++++ .../harvard/iq/dataverse/api/Datasets.java | 17 ++++-- .../iq/dataverse/util/json/JsonPrinter.java | 5 +- .../harvard/iq/dataverse/api/DatasetsIT.java | 24 +++++---- 5 files changed, 97 insertions(+), 48 deletions(-) create mode 100644 src/main/java/edu/harvard/iq/dataverse/FileSearchCriteria.java diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetVersionFilesServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DatasetVersionFilesServiceBean.java index 6006d937100..a436b10d340 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetVersionFilesServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetVersionFilesServiceBean.java @@ -29,6 +29,8 @@ import static edu.harvard.iq.dataverse.DataFileTag.TagLabelToTypes; +import edu.harvard.iq.dataverse.FileSearchCriteria.FileAccessStatus; + @Stateless @Named public class DatasetVersionFilesServiceBean implements Serializable { @@ -44,17 +46,10 @@ public class DatasetVersionFilesServiceBean implements Serializable { /** * Different criteria to sort the results of FileMetadata queries used in {@link DatasetVersionFilesServiceBean#getFileMetadatas} */ - public 
enum FileMetadatasOrderCriteria { + public enum FileOrderCriteria { NameAZ, NameZA, Newest, Oldest, Size, Type } - /** - * Status of the particular DataFile based on active embargoes and restriction state used in {@link DatasetVersionFilesServiceBean#getFileMetadatas} - */ - public enum DataFileAccessStatus { - Public, Restricted, EmbargoedThenRestricted, EmbargoedThenPublic - } - /** * Given a DatasetVersion, returns its total file metadata count * @@ -107,17 +102,17 @@ public Map getFileMetadataCountPerCategoryName(DatasetVersion data } /** - * Given a DatasetVersion, returns its file metadata count per DataFileAccessStatus + * Given a DatasetVersion, returns its file metadata count per FileAccessStatus * * @param datasetVersion the DatasetVersion to access - * @return Map of file metadata counts per DataFileAccessStatus + * @return Map of file metadata counts per FileAccessStatus */ - public Map getFileMetadataCountPerAccessStatus(DatasetVersion datasetVersion) { - Map allCounts = new HashMap<>(); - addAccessStatusCountToTotal(datasetVersion, allCounts, DataFileAccessStatus.Public); - addAccessStatusCountToTotal(datasetVersion, allCounts, DataFileAccessStatus.Restricted); - addAccessStatusCountToTotal(datasetVersion, allCounts, DataFileAccessStatus.EmbargoedThenPublic); - addAccessStatusCountToTotal(datasetVersion, allCounts, DataFileAccessStatus.EmbargoedThenRestricted); + public Map getFileMetadataCountPerAccessStatus(DatasetVersion datasetVersion) { + Map allCounts = new HashMap<>(); + addAccessStatusCountToTotal(datasetVersion, allCounts, FileAccessStatus.Public); + addAccessStatusCountToTotal(datasetVersion, allCounts, FileAccessStatus.Restricted); + addAccessStatusCountToTotal(datasetVersion, allCounts, FileAccessStatus.EmbargoedThenPublic); + addAccessStatusCountToTotal(datasetVersion, allCounts, FileAccessStatus.EmbargoedThenRestricted); return allCounts; } @@ -127,29 +122,30 @@ public Map getFileMetadataCountPerAccessStatus(Datas * @param datasetVersion 
the DatasetVersion to access * @param limit for pagination, can be null * @param offset for pagination, can be null - * @param contentType for retrieving only files with this content type - * @param accessStatus for retrieving only files with this DataFileAccessStatus - * @param categoryName for retrieving only files categorized with this category name - * @param tabularTagName for retrieving only files categorized with this tabular tag name - * @param searchText for retrieving only files that contain the specified text within their labels or descriptions - * @param orderCriteria a FileMetadatasOrderCriteria to order the results + * @param searchCriteria for retrieving only files matching this criteria + * @param orderCriteria a FileOrderCriteria to order the results * @return a FileMetadata list from the specified DatasetVersion */ - public List getFileMetadatas(DatasetVersion datasetVersion, Integer limit, Integer offset, String contentType, DataFileAccessStatus accessStatus, String categoryName, String tabularTagName, String searchText, FileMetadatasOrderCriteria orderCriteria) { + public List getFileMetadatas(DatasetVersion datasetVersion, Integer limit, Integer offset, FileSearchCriteria searchCriteria, FileOrderCriteria orderCriteria) { JPAQuery baseQuery = createGetFileMetadatasBaseQuery(datasetVersion, orderCriteria); + String contentType = searchCriteria.getContentType(); if (contentType != null) { baseQuery.where(fileMetadata.dataFile.contentType.eq(contentType)); } + FileAccessStatus accessStatus = searchCriteria.getAccessStatus(); if (accessStatus != null) { baseQuery.where(createGetFileMetadatasAccessStatusExpression(accessStatus)); } + String categoryName = searchCriteria.getCategoryName(); if (categoryName != null) { baseQuery.from(dataFileCategory).where(dataFileCategory.name.eq(categoryName).and(fileMetadata.fileCategories.contains(dataFileCategory))); } + String tabularTagName = searchCriteria.getTabularTagName(); if (tabularTagName != null) { 
baseQuery.from(dataFileTag).where(dataFileTag.type.eq(TagLabelToTypes.get(tabularTagName)).and(fileMetadata.dataFile.dataFileTags.contains(dataFileTag))); } + String searchText = searchCriteria.getSearchText(); if (searchText != null && !searchText.isEmpty()) { searchText = searchText.trim().toLowerCase(); baseQuery.where(fileMetadata.label.lower().contains(searchText).or(fileMetadata.description.lower().contains(searchText))); @@ -167,14 +163,14 @@ public List getFileMetadatas(DatasetVersion datasetVersion, Intege return baseQuery.fetch(); } - private void addAccessStatusCountToTotal(DatasetVersion datasetVersion, Map totalCounts, DataFileAccessStatus dataFileAccessStatus) { + private void addAccessStatusCountToTotal(DatasetVersion datasetVersion, Map totalCounts, FileAccessStatus dataFileAccessStatus) { long fileMetadataCount = getFileMetadataCountByAccessStatus(datasetVersion, dataFileAccessStatus); if (fileMetadataCount > 0) { totalCounts.put(dataFileAccessStatus, fileMetadataCount); } } - private long getFileMetadataCountByAccessStatus(DatasetVersion datasetVersion, DataFileAccessStatus accessStatus) { + private long getFileMetadataCountByAccessStatus(DatasetVersion datasetVersion, FileAccessStatus accessStatus) { JPAQueryFactory queryFactory = new JPAQueryFactory(em); return queryFactory .selectFrom(fileMetadata) @@ -182,16 +178,16 @@ private long getFileMetadataCountByAccessStatus(DatasetVersion datasetVersion, D .stream().count(); } - private JPAQuery createGetFileMetadatasBaseQuery(DatasetVersion datasetVersion, FileMetadatasOrderCriteria orderCriteria) { + private JPAQuery createGetFileMetadatasBaseQuery(DatasetVersion datasetVersion, FileOrderCriteria orderCriteria) { JPAQueryFactory queryFactory = new JPAQueryFactory(em); JPAQuery baseQuery = queryFactory.selectFrom(fileMetadata).where(fileMetadata.datasetVersion.id.eq(datasetVersion.getId())); - if (orderCriteria == FileMetadatasOrderCriteria.Newest || orderCriteria == 
FileMetadatasOrderCriteria.Oldest) { + if (orderCriteria == FileOrderCriteria.Newest || orderCriteria == FileOrderCriteria.Oldest) { baseQuery.from(dvObject).where(dvObject.id.eq(fileMetadata.dataFile.id)); } return baseQuery; } - private BooleanExpression createGetFileMetadatasAccessStatusExpression(DataFileAccessStatus accessStatus) { + private BooleanExpression createGetFileMetadatasAccessStatusExpression(FileAccessStatus accessStatus) { QEmbargo embargo = fileMetadata.dataFile.embargo; BooleanExpression activelyEmbargoedExpression = embargo.dateAvailable.goe(DateExpression.currentDate(LocalDate.class)); BooleanExpression inactivelyEmbargoedExpression = embargo.isNull(); @@ -215,7 +211,7 @@ private BooleanExpression createGetFileMetadatasAccessStatusExpression(DataFileA return accessStatusExpression; } - private void applyOrderCriteriaToGetFileMetadatasQuery(JPAQuery query, FileMetadatasOrderCriteria orderCriteria) { + private void applyOrderCriteriaToGetFileMetadatasQuery(JPAQuery query, FileOrderCriteria orderCriteria) { DateTimeExpression orderByLifetimeExpression = new CaseBuilder().when(dvObject.publicationDate.isNotNull()).then(dvObject.publicationDate).otherwise(dvObject.createDate); switch (orderCriteria) { case NameZA: diff --git a/src/main/java/edu/harvard/iq/dataverse/FileSearchCriteria.java b/src/main/java/edu/harvard/iq/dataverse/FileSearchCriteria.java new file mode 100644 index 00000000000..62f10c18bdf --- /dev/null +++ b/src/main/java/edu/harvard/iq/dataverse/FileSearchCriteria.java @@ -0,0 +1,45 @@ +package edu.harvard.iq.dataverse; + +public class FileSearchCriteria { + + private final String contentType; + private final FileAccessStatus accessStatus; + private final String categoryName; + private final String tabularTagName; + private final String searchText; + + /** + * Status of the particular DataFile based on active embargoes and restriction state + */ + public enum FileAccessStatus { + Public, Restricted, EmbargoedThenRestricted, 
EmbargoedThenPublic + } + + public FileSearchCriteria(String contentType, FileAccessStatus accessStatus, String categoryName, String tabularTagName, String searchText) { + this.contentType = contentType; + this.accessStatus = accessStatus; + this.categoryName = categoryName; + this.tabularTagName = tabularTagName; + this.searchText = searchText; + } + + public String getContentType() { + return contentType; + } + + public FileAccessStatus getAccessStatus() { + return accessStatus; + } + + public String getCategoryName() { + return categoryName; + } + + public String getTabularTagName() { + return tabularTagName; + } + + public String getSearchText() { + return searchText; + } +} diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java index b3be55399d8..14fd1b2453c 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java @@ -506,19 +506,26 @@ public Response getVersionFiles(@Context ContainerRequestContext crc, @Context HttpHeaders headers) { return response(req -> { DatasetVersion datasetVersion = getDatasetVersionOrDie(req, versionId, findDatasetOrDie(datasetId), uriInfo, headers); - DatasetVersionFilesServiceBean.FileMetadatasOrderCriteria fileMetadatasOrderCriteria; + DatasetVersionFilesServiceBean.FileOrderCriteria fileOrderCriteria; try { - fileMetadatasOrderCriteria = orderCriteria != null ? DatasetVersionFilesServiceBean.FileMetadatasOrderCriteria.valueOf(orderCriteria) : DatasetVersionFilesServiceBean.FileMetadatasOrderCriteria.NameAZ; + fileOrderCriteria = orderCriteria != null ? 
DatasetVersionFilesServiceBean.FileOrderCriteria.valueOf(orderCriteria) : DatasetVersionFilesServiceBean.FileOrderCriteria.NameAZ; } catch (IllegalArgumentException e) { return error(Response.Status.BAD_REQUEST, "Invalid order criteria: " + orderCriteria); } - DatasetVersionFilesServiceBean.DataFileAccessStatus dataFileAccessStatus; + FileSearchCriteria.FileAccessStatus dataFileAccessStatus; try { - dataFileAccessStatus = accessStatus != null ? DatasetVersionFilesServiceBean.DataFileAccessStatus.valueOf(accessStatus) : null; + dataFileAccessStatus = accessStatus != null ? FileSearchCriteria.FileAccessStatus.valueOf(accessStatus) : null; } catch (IllegalArgumentException e) { return error(Response.Status.BAD_REQUEST, "Invalid access status: " + accessStatus); } - return ok(jsonFileMetadatas(datasetVersionFilesServiceBean.getFileMetadatas(datasetVersion, limit, offset, contentType, dataFileAccessStatus, categoryName, tabularTagName, searchText, fileMetadatasOrderCriteria))); + FileSearchCriteria fileSearchCriteria = new FileSearchCriteria( + contentType, + dataFileAccessStatus, + categoryName, + tabularTagName, + searchText + ); + return ok(jsonFileMetadatas(datasetVersionFilesServiceBean.getFileMetadatas(datasetVersion, limit, offset, fileSearchCriteria, fileOrderCriteria))); }, getRequestUser(crc)); } diff --git a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java index 1fed0b233e4..70840c7502f 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java @@ -41,7 +41,6 @@ import jakarta.json.Json; import jakarta.json.JsonArrayBuilder; import jakarta.json.JsonObjectBuilder; -import jakarta.json.JsonValue; import java.util.function.BiConsumer; import java.util.function.BinaryOperator; @@ -1108,9 +1107,9 @@ public static JsonObjectBuilder json(Map map) { return jsonObjectBuilder; } - public static 
JsonObjectBuilder jsonFileCountPerAccessStatusMap(Map map) { + public static JsonObjectBuilder jsonFileCountPerAccessStatusMap(Map map) { JsonObjectBuilder jsonObjectBuilder = Json.createObjectBuilder(); - for (Map.Entry mapEntry : map.entrySet()) { + for (Map.Entry mapEntry : map.entrySet()) { jsonObjectBuilder.add(mapEntry.getKey().toString(), mapEntry.getValue()); } return jsonObjectBuilder; diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java index b9f09cc7c07..5d1a89aa555 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java @@ -1,6 +1,7 @@ package edu.harvard.iq.dataverse.api; import edu.harvard.iq.dataverse.DatasetVersionFilesServiceBean; +import edu.harvard.iq.dataverse.FileSearchCriteria; import io.restassured.RestAssured; import static io.restassured.RestAssured.given; @@ -3267,6 +3268,7 @@ public void getDatasetVersionCitation() { .body("data.message", containsString("DRAFT VERSION")); } + @Test public void getVersionFiles() throws IOException, InterruptedException { Response createUser = UtilIT.createRandomUser(); createUser.then().assertThat().statusCode(OK.getStatusCode()); @@ -3334,7 +3336,7 @@ public void getVersionFiles() throws IOException, InterruptedException { assertEquals(1, fileMetadatasCount); // Test NameZA order criteria - Response getVersionFilesResponseNameZACriteria = UtilIT.getVersionFiles(datasetId, testDatasetVersion, null, null, null, null, null, null, null, DatasetVersionFilesServiceBean.FileMetadatasOrderCriteria.NameZA.toString(), apiToken); + Response getVersionFilesResponseNameZACriteria = UtilIT.getVersionFiles(datasetId, testDatasetVersion, null, null, null, null, null, null, null, DatasetVersionFilesServiceBean.FileOrderCriteria.NameZA.toString(), apiToken); getVersionFilesResponseNameZACriteria.then().assertThat() .statusCode(OK.getStatusCode()) @@ -3345,7 +3347,7 
@@ public void getVersionFiles() throws IOException, InterruptedException { .body("data[4].label", equalTo(testFileName1)); // Test Newest order criteria - Response getVersionFilesResponseNewestCriteria = UtilIT.getVersionFiles(datasetId, testDatasetVersion, null, null, null, null, null, null, null, DatasetVersionFilesServiceBean.FileMetadatasOrderCriteria.Newest.toString(), apiToken); + Response getVersionFilesResponseNewestCriteria = UtilIT.getVersionFiles(datasetId, testDatasetVersion, null, null, null, null, null, null, null, DatasetVersionFilesServiceBean.FileOrderCriteria.Newest.toString(), apiToken); getVersionFilesResponseNewestCriteria.then().assertThat() .statusCode(OK.getStatusCode()) @@ -3356,7 +3358,7 @@ public void getVersionFiles() throws IOException, InterruptedException { .body("data[4].label", equalTo(testFileName1)); // Test Oldest order criteria - Response getVersionFilesResponseOldestCriteria = UtilIT.getVersionFiles(datasetId, testDatasetVersion, null, null, null, null, null, null, null, DatasetVersionFilesServiceBean.FileMetadatasOrderCriteria.Oldest.toString(), apiToken); + Response getVersionFilesResponseOldestCriteria = UtilIT.getVersionFiles(datasetId, testDatasetVersion, null, null, null, null, null, null, null, DatasetVersionFilesServiceBean.FileOrderCriteria.Oldest.toString(), apiToken); getVersionFilesResponseOldestCriteria.then().assertThat() .statusCode(OK.getStatusCode()) @@ -3367,7 +3369,7 @@ public void getVersionFiles() throws IOException, InterruptedException { .body("data[4].label", equalTo(testFileName4)); // Test Size order criteria - Response getVersionFilesResponseSizeCriteria = UtilIT.getVersionFiles(datasetId, testDatasetVersion, null, null, null, null, null, null, null, DatasetVersionFilesServiceBean.FileMetadatasOrderCriteria.Size.toString(), apiToken); + Response getVersionFilesResponseSizeCriteria = UtilIT.getVersionFiles(datasetId, testDatasetVersion, null, null, null, null, null, null, null, 
DatasetVersionFilesServiceBean.FileOrderCriteria.Size.toString(), apiToken); getVersionFilesResponseSizeCriteria.then().assertThat() .statusCode(OK.getStatusCode()) @@ -3378,7 +3380,7 @@ public void getVersionFiles() throws IOException, InterruptedException { .body("data[4].label", equalTo(testFileName4)); // Test Type order criteria - Response getVersionFilesResponseTypeCriteria = UtilIT.getVersionFiles(datasetId, testDatasetVersion, null, null, null, null, null, null, null, DatasetVersionFilesServiceBean.FileMetadatasOrderCriteria.Type.toString(), apiToken); + Response getVersionFilesResponseTypeCriteria = UtilIT.getVersionFiles(datasetId, testDatasetVersion, null, null, null, null, null, null, null, DatasetVersionFilesServiceBean.FileOrderCriteria.Type.toString(), apiToken); getVersionFilesResponseTypeCriteria.then().assertThat() .statusCode(OK.getStatusCode()) @@ -3427,7 +3429,7 @@ public void getVersionFiles() throws IOException, InterruptedException { restrictFileResponse.then().assertThat() .statusCode(OK.getStatusCode()); - Response getVersionFilesResponseRestricted = UtilIT.getVersionFiles(datasetId, testDatasetVersion, null, null, null, DatasetVersionFilesServiceBean.DataFileAccessStatus.Restricted.toString(), null, null, null, null, apiToken); + Response getVersionFilesResponseRestricted = UtilIT.getVersionFiles(datasetId, testDatasetVersion, null, null, null, FileSearchCriteria.FileAccessStatus.Restricted.toString(), null, null, null, null, apiToken); getVersionFilesResponseRestricted.then().assertThat() .statusCode(OK.getStatusCode()) @@ -3452,7 +3454,7 @@ public void getVersionFiles() throws IOException, InterruptedException { createActiveFileEmbargoResponse.then().assertThat() .statusCode(OK.getStatusCode()); - Response getVersionFilesResponseEmbargoedThenPublic = UtilIT.getVersionFiles(datasetId, testDatasetVersion, null, null, null, DatasetVersionFilesServiceBean.DataFileAccessStatus.EmbargoedThenPublic.toString(), null, null, null, null, 
apiToken); + Response getVersionFilesResponseEmbargoedThenPublic = UtilIT.getVersionFiles(datasetId, testDatasetVersion, null, null, null, FileSearchCriteria.FileAccessStatus.EmbargoedThenPublic.toString(), null, null, null, null, apiToken); getVersionFilesResponseEmbargoedThenPublic.then().assertThat() .statusCode(OK.getStatusCode()) @@ -3461,7 +3463,7 @@ public void getVersionFiles() throws IOException, InterruptedException { fileMetadatasCount = getVersionFilesResponseEmbargoedThenPublic.jsonPath().getList("data").size(); assertEquals(1, fileMetadatasCount); - Response getVersionFilesResponseEmbargoedThenRestricted = UtilIT.getVersionFiles(datasetId, testDatasetVersion, null, null, null, DatasetVersionFilesServiceBean.DataFileAccessStatus.EmbargoedThenRestricted.toString(), null, null, null, null, apiToken); + Response getVersionFilesResponseEmbargoedThenRestricted = UtilIT.getVersionFiles(datasetId, testDatasetVersion, null, null, null, FileSearchCriteria.FileAccessStatus.EmbargoedThenRestricted.toString(), null, null, null, null, apiToken); getVersionFilesResponseEmbargoedThenRestricted.then().assertThat() .statusCode(OK.getStatusCode()) @@ -3471,7 +3473,7 @@ public void getVersionFiles() throws IOException, InterruptedException { assertEquals(1, fileMetadatasCount); // Test Access Status Public - Response getVersionFilesResponsePublic = UtilIT.getVersionFiles(datasetId, testDatasetVersion, null, null, null, DatasetVersionFilesServiceBean.DataFileAccessStatus.Public.toString(), null, null, null, null, apiToken); + Response getVersionFilesResponsePublic = UtilIT.getVersionFiles(datasetId, testDatasetVersion, null, null, null, FileSearchCriteria.FileAccessStatus.Public.toString(), null, null, null, null, apiToken); getVersionFilesResponsePublic.then().assertThat() .statusCode(OK.getStatusCode()) @@ -3569,7 +3571,7 @@ public void getVersionFileCounts() throws IOException { assertEquals(2, responseCountPerContentTypeMap.get("image/png")); assertEquals(2, 
responseCountPerContentTypeMap.get("text/plain")); assertEquals(1, responseCountPerCategoryNameMap.get(testCategory)); - assertEquals(3, responseCountPerAccessStatusMap.get(DatasetVersionFilesServiceBean.DataFileAccessStatus.Public.toString())); - assertEquals(1, responseCountPerAccessStatusMap.get(DatasetVersionFilesServiceBean.DataFileAccessStatus.EmbargoedThenPublic.toString())); + assertEquals(3, responseCountPerAccessStatusMap.get(FileSearchCriteria.FileAccessStatus.Public.toString())); + assertEquals(1, responseCountPerAccessStatusMap.get(FileSearchCriteria.FileAccessStatus.EmbargoedThenPublic.toString())); } } From 94fe70952ed46fd833f5948dfd9dc6ba92b95f6a Mon Sep 17 00:00:00 2001 From: GPortas Date: Fri, 6 Oct 2023 14:49:55 +0100 Subject: [PATCH 372/396] Fixed: missing @Test annotation added to getVersionFilesIT --- src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java | 1 + 1 file changed, 1 insertion(+) diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java index b9f09cc7c07..cdd1b4dff2b 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java @@ -3267,6 +3267,7 @@ public void getDatasetVersionCitation() { .body("data.message", containsString("DRAFT VERSION")); } + @Test public void getVersionFiles() throws IOException, InterruptedException { Response createUser = UtilIT.createRandomUser(); createUser.then().assertThat().statusCode(OK.getStatusCode()); From 690ac1e96a2717774e04aefb11603ae126005559 Mon Sep 17 00:00:00 2001 From: GPortas Date: Fri, 6 Oct 2023 15:29:45 +0100 Subject: [PATCH 373/396] Added: file search criteria params to getVersionFileCounts API endpoint (Pending IT to be added) --- .../DatasetVersionFilesServiceBean.java | 99 ++++++++++--------- .../harvard/iq/dataverse/api/Datasets.java | 48 ++++++--- 2 files changed, 89 insertions(+), 58 deletions(-) diff --git 
a/src/main/java/edu/harvard/iq/dataverse/DatasetVersionFilesServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DatasetVersionFilesServiceBean.java index a436b10d340..9afd0513b62 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetVersionFilesServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetVersionFilesServiceBean.java @@ -54,26 +54,32 @@ public enum FileOrderCriteria { * Given a DatasetVersion, returns its total file metadata count * * @param datasetVersion the DatasetVersion to access + * @param searchCriteria for counting only files matching this criteria * @return long value of total file metadata count */ - public long getFileMetadataCount(DatasetVersion datasetVersion) { + public long getFileMetadataCount(DatasetVersion datasetVersion, FileSearchCriteria searchCriteria) { JPAQueryFactory queryFactory = new JPAQueryFactory(em); - return queryFactory.selectFrom(fileMetadata).where(fileMetadata.datasetVersion.id.eq(datasetVersion.getId())).stream().count(); + JPAQuery baseQuery = queryFactory.selectFrom(fileMetadata).where(fileMetadata.datasetVersion.id.eq(datasetVersion.getId())); + applyFileSearchCriteriaToQuery(baseQuery, searchCriteria); + return baseQuery.stream().count(); } /** * Given a DatasetVersion, returns its file metadata count per content type * * @param datasetVersion the DatasetVersion to access + * @param searchCriteria for counting only files matching this criteria * @return Map of file metadata counts per content type */ - public Map getFileMetadataCountPerContentType(DatasetVersion datasetVersion) { + public Map getFileMetadataCountPerContentType(DatasetVersion datasetVersion, FileSearchCriteria searchCriteria) { JPAQueryFactory queryFactory = new JPAQueryFactory(em); - List contentTypeOccurrences = queryFactory + JPAQuery baseQuery = queryFactory .select(fileMetadata.dataFile.contentType, fileMetadata.count()) .from(fileMetadata) .where(fileMetadata.datasetVersion.id.eq(datasetVersion.getId())) - 
.groupBy(fileMetadata.dataFile.contentType).fetch(); + .groupBy(fileMetadata.dataFile.contentType); + applyFileSearchCriteriaToQuery(baseQuery, searchCriteria); + List contentTypeOccurrences = baseQuery.fetch(); Map result = new HashMap<>(); for (Tuple occurrence : contentTypeOccurrences) { result.put(occurrence.get(fileMetadata.dataFile.contentType), occurrence.get(fileMetadata.count())); @@ -85,15 +91,18 @@ public Map getFileMetadataCountPerContentType(DatasetVersion datas * Given a DatasetVersion, returns its file metadata count per category name * * @param datasetVersion the DatasetVersion to access + * @param searchCriteria for counting only files matching this criteria * @return Map of file metadata counts per category name */ - public Map getFileMetadataCountPerCategoryName(DatasetVersion datasetVersion) { + public Map getFileMetadataCountPerCategoryName(DatasetVersion datasetVersion, FileSearchCriteria searchCriteria) { JPAQueryFactory queryFactory = new JPAQueryFactory(em); - List categoryNameOccurrences = queryFactory + JPAQuery baseQuery = queryFactory .select(dataFileCategory.name, fileMetadata.count()) .from(dataFileCategory, fileMetadata) .where(fileMetadata.datasetVersion.id.eq(datasetVersion.getId()).and(fileMetadata.fileCategories.contains(dataFileCategory))) - .groupBy(dataFileCategory.name).fetch(); + .groupBy(dataFileCategory.name); + applyFileSearchCriteriaToQuery(baseQuery, searchCriteria); + List categoryNameOccurrences = baseQuery.fetch(); Map result = new HashMap<>(); for (Tuple occurrence : categoryNameOccurrences) { result.put(occurrence.get(dataFileCategory.name), occurrence.get(fileMetadata.count())); @@ -105,14 +114,15 @@ public Map getFileMetadataCountPerCategoryName(DatasetVersion data * Given a DatasetVersion, returns its file metadata count per FileAccessStatus * * @param datasetVersion the DatasetVersion to access + * @param searchCriteria for counting only files matching this criteria * @return Map of file metadata counts per 
FileAccessStatus */ - public Map getFileMetadataCountPerAccessStatus(DatasetVersion datasetVersion) { + public Map getFileMetadataCountPerAccessStatus(DatasetVersion datasetVersion, FileSearchCriteria searchCriteria) { Map allCounts = new HashMap<>(); - addAccessStatusCountToTotal(datasetVersion, allCounts, FileAccessStatus.Public); - addAccessStatusCountToTotal(datasetVersion, allCounts, FileAccessStatus.Restricted); - addAccessStatusCountToTotal(datasetVersion, allCounts, FileAccessStatus.EmbargoedThenPublic); - addAccessStatusCountToTotal(datasetVersion, allCounts, FileAccessStatus.EmbargoedThenRestricted); + addAccessStatusCountToTotal(datasetVersion, allCounts, FileAccessStatus.Public, searchCriteria); + addAccessStatusCountToTotal(datasetVersion, allCounts, FileAccessStatus.Restricted, searchCriteria); + addAccessStatusCountToTotal(datasetVersion, allCounts, FileAccessStatus.EmbargoedThenPublic, searchCriteria); + addAccessStatusCountToTotal(datasetVersion, allCounts, FileAccessStatus.EmbargoedThenRestricted, searchCriteria); return allCounts; } @@ -128,54 +138,31 @@ public Map getFileMetadataCountPerAccessStatus(DatasetVe */ public List getFileMetadatas(DatasetVersion datasetVersion, Integer limit, Integer offset, FileSearchCriteria searchCriteria, FileOrderCriteria orderCriteria) { JPAQuery baseQuery = createGetFileMetadatasBaseQuery(datasetVersion, orderCriteria); - - String contentType = searchCriteria.getContentType(); - if (contentType != null) { - baseQuery.where(fileMetadata.dataFile.contentType.eq(contentType)); - } - FileAccessStatus accessStatus = searchCriteria.getAccessStatus(); - if (accessStatus != null) { - baseQuery.where(createGetFileMetadatasAccessStatusExpression(accessStatus)); - } - String categoryName = searchCriteria.getCategoryName(); - if (categoryName != null) { - baseQuery.from(dataFileCategory).where(dataFileCategory.name.eq(categoryName).and(fileMetadata.fileCategories.contains(dataFileCategory))); - } - String tabularTagName = 
searchCriteria.getTabularTagName(); - if (tabularTagName != null) { - baseQuery.from(dataFileTag).where(dataFileTag.type.eq(TagLabelToTypes.get(tabularTagName)).and(fileMetadata.dataFile.dataFileTags.contains(dataFileTag))); - } - String searchText = searchCriteria.getSearchText(); - if (searchText != null && !searchText.isEmpty()) { - searchText = searchText.trim().toLowerCase(); - baseQuery.where(fileMetadata.label.lower().contains(searchText).or(fileMetadata.description.lower().contains(searchText))); - } - + applyFileSearchCriteriaToQuery(baseQuery, searchCriteria); applyOrderCriteriaToGetFileMetadatasQuery(baseQuery, orderCriteria); - if (limit != null) { baseQuery.limit(limit); } if (offset != null) { baseQuery.offset(offset); } - return baseQuery.fetch(); } - private void addAccessStatusCountToTotal(DatasetVersion datasetVersion, Map totalCounts, FileAccessStatus dataFileAccessStatus) { - long fileMetadataCount = getFileMetadataCountByAccessStatus(datasetVersion, dataFileAccessStatus); + private void addAccessStatusCountToTotal(DatasetVersion datasetVersion, Map totalCounts, FileAccessStatus dataFileAccessStatus, FileSearchCriteria searchCriteria) { + long fileMetadataCount = getFileMetadataCountByAccessStatus(datasetVersion, dataFileAccessStatus, searchCriteria); if (fileMetadataCount > 0) { totalCounts.put(dataFileAccessStatus, fileMetadataCount); } } - private long getFileMetadataCountByAccessStatus(DatasetVersion datasetVersion, FileAccessStatus accessStatus) { + private long getFileMetadataCountByAccessStatus(DatasetVersion datasetVersion, FileAccessStatus accessStatus, FileSearchCriteria searchCriteria) { JPAQueryFactory queryFactory = new JPAQueryFactory(em); - return queryFactory + JPAQuery baseQuery = queryFactory .selectFrom(fileMetadata) - .where(fileMetadata.datasetVersion.id.eq(datasetVersion.getId()).and(createGetFileMetadatasAccessStatusExpression(accessStatus))) - .stream().count(); + 
.where(fileMetadata.datasetVersion.id.eq(datasetVersion.getId()).and(createGetFileMetadatasAccessStatusExpression(accessStatus))); + applyFileSearchCriteriaToQuery(baseQuery, searchCriteria); + return baseQuery.stream().count(); } private JPAQuery createGetFileMetadatasBaseQuery(DatasetVersion datasetVersion, FileOrderCriteria orderCriteria) { @@ -211,6 +198,30 @@ private BooleanExpression createGetFileMetadatasAccessStatusExpression(FileAcces return accessStatusExpression; } + private void applyFileSearchCriteriaToQuery(JPAQuery baseQuery, FileSearchCriteria searchCriteria) { + String contentType = searchCriteria.getContentType(); + if (contentType != null) { + baseQuery.where(fileMetadata.dataFile.contentType.eq(contentType)); + } + FileAccessStatus accessStatus = searchCriteria.getAccessStatus(); + if (accessStatus != null) { + baseQuery.where(createGetFileMetadatasAccessStatusExpression(accessStatus)); + } + String categoryName = searchCriteria.getCategoryName(); + if (categoryName != null) { + baseQuery.from(dataFileCategory).where(dataFileCategory.name.eq(categoryName).and(fileMetadata.fileCategories.contains(dataFileCategory))); + } + String tabularTagName = searchCriteria.getTabularTagName(); + if (tabularTagName != null) { + baseQuery.from(dataFileTag).where(dataFileTag.type.eq(TagLabelToTypes.get(tabularTagName)).and(fileMetadata.dataFile.dataFileTags.contains(dataFileTag))); + } + String searchText = searchCriteria.getSearchText(); + if (searchText != null && !searchText.isEmpty()) { + searchText = searchText.trim().toLowerCase(); + baseQuery.where(fileMetadata.label.lower().contains(searchText).or(fileMetadata.description.lower().contains(searchText))); + } + } + private void applyOrderCriteriaToGetFileMetadatasQuery(JPAQuery query, FileOrderCriteria orderCriteria) { DateTimeExpression orderByLifetimeExpression = new CaseBuilder().when(dvObject.publicationDate.isNotNull()).then(dvObject.publicationDate).otherwise(dvObject.createDate); switch 
(orderCriteria) { diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java index 14fd1b2453c..ac32454c950 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java @@ -512,19 +512,18 @@ public Response getVersionFiles(@Context ContainerRequestContext crc, } catch (IllegalArgumentException e) { return error(Response.Status.BAD_REQUEST, "Invalid order criteria: " + orderCriteria); } - FileSearchCriteria.FileAccessStatus dataFileAccessStatus; + FileSearchCriteria fileSearchCriteria; try { - dataFileAccessStatus = accessStatus != null ? FileSearchCriteria.FileAccessStatus.valueOf(accessStatus) : null; + fileSearchCriteria = new FileSearchCriteria( + contentType, + accessStatus != null ? FileSearchCriteria.FileAccessStatus.valueOf(accessStatus) : null, + categoryName, + tabularTagName, + searchText + ); } catch (IllegalArgumentException e) { return error(Response.Status.BAD_REQUEST, "Invalid access status: " + accessStatus); } - FileSearchCriteria fileSearchCriteria = new FileSearchCriteria( - contentType, - dataFileAccessStatus, - categoryName, - tabularTagName, - searchText - ); return ok(jsonFileMetadatas(datasetVersionFilesServiceBean.getFileMetadatas(datasetVersion, limit, offset, fileSearchCriteria, fileOrderCriteria))); }, getRequestUser(crc)); } @@ -532,14 +531,35 @@ public Response getVersionFiles(@Context ContainerRequestContext crc, @GET @AuthRequired @Path("{id}/versions/{versionId}/files/counts") - public Response getVersionFileCounts(@Context ContainerRequestContext crc, @PathParam("id") String datasetId, @PathParam("versionId") String versionId, @Context UriInfo uriInfo, @Context HttpHeaders headers) { + public Response getVersionFileCounts(@Context ContainerRequestContext crc, + @PathParam("id") String datasetId, + @PathParam("versionId") String versionId, + @QueryParam("contentType") String contentType, + 
@QueryParam("accessStatus") String accessStatus, + @QueryParam("categoryName") String categoryName, + @QueryParam("tabularTagName") String tabularTagName, + @QueryParam("searchText") String searchText, + @Context UriInfo uriInfo, + @Context HttpHeaders headers) { return response(req -> { + FileSearchCriteria fileSearchCriteria; + try { + fileSearchCriteria = new FileSearchCriteria( + contentType, + accessStatus != null ? FileSearchCriteria.FileAccessStatus.valueOf(accessStatus) : null, + categoryName, + tabularTagName, + searchText + ); + } catch (IllegalArgumentException e) { + return error(Response.Status.BAD_REQUEST, "Invalid access status: " + accessStatus); + } DatasetVersion datasetVersion = getDatasetVersionOrDie(req, versionId, findDatasetOrDie(datasetId), uriInfo, headers); JsonObjectBuilder jsonObjectBuilder = Json.createObjectBuilder(); - jsonObjectBuilder.add("total", datasetVersionFilesServiceBean.getFileMetadataCount(datasetVersion)); - jsonObjectBuilder.add("perContentType", json(datasetVersionFilesServiceBean.getFileMetadataCountPerContentType(datasetVersion))); - jsonObjectBuilder.add("perCategoryName", json(datasetVersionFilesServiceBean.getFileMetadataCountPerCategoryName(datasetVersion))); - jsonObjectBuilder.add("perAccessStatus", jsonFileCountPerAccessStatusMap(datasetVersionFilesServiceBean.getFileMetadataCountPerAccessStatus(datasetVersion))); + jsonObjectBuilder.add("total", datasetVersionFilesServiceBean.getFileMetadataCount(datasetVersion, fileSearchCriteria)); + jsonObjectBuilder.add("perContentType", json(datasetVersionFilesServiceBean.getFileMetadataCountPerContentType(datasetVersion, fileSearchCriteria))); + jsonObjectBuilder.add("perCategoryName", json(datasetVersionFilesServiceBean.getFileMetadataCountPerCategoryName(datasetVersion, fileSearchCriteria))); + jsonObjectBuilder.add("perAccessStatus", jsonFileCountPerAccessStatusMap(datasetVersionFilesServiceBean.getFileMetadataCountPerAccessStatus(datasetVersion, fileSearchCriteria))); 
return ok(jsonObjectBuilder); }, getRequestUser(crc)); } From a0870b8554c709f25fb3bc47e04f58e08e951f2f Mon Sep 17 00:00:00 2001 From: GPortas Date: Fri, 6 Oct 2023 15:35:17 +0100 Subject: [PATCH 374/396] Refactor: using Bundle.properties string for bad request errors in getVersionFiles and getVersionFileCounts API endpoints --- src/main/java/edu/harvard/iq/dataverse/api/Datasets.java | 6 +++--- src/main/java/propertyFiles/Bundle.properties | 2 ++ 2 files changed, 5 insertions(+), 3 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java index ac32454c950..f7a4b1d0d25 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java @@ -510,7 +510,7 @@ public Response getVersionFiles(@Context ContainerRequestContext crc, try { fileOrderCriteria = orderCriteria != null ? DatasetVersionFilesServiceBean.FileOrderCriteria.valueOf(orderCriteria) : DatasetVersionFilesServiceBean.FileOrderCriteria.NameAZ; } catch (IllegalArgumentException e) { - return error(Response.Status.BAD_REQUEST, "Invalid order criteria: " + orderCriteria); + return badRequest(BundleUtil.getStringFromBundle("datasets.api.version.files.invalid.order.criteria", List.of(orderCriteria))); } FileSearchCriteria fileSearchCriteria; try { @@ -522,7 +522,7 @@ public Response getVersionFiles(@Context ContainerRequestContext crc, searchText ); } catch (IllegalArgumentException e) { - return error(Response.Status.BAD_REQUEST, "Invalid access status: " + accessStatus); + return badRequest(BundleUtil.getStringFromBundle("datasets.api.version.files.invalid.access.status", List.of(accessStatus))); } return ok(jsonFileMetadatas(datasetVersionFilesServiceBean.getFileMetadatas(datasetVersion, limit, offset, fileSearchCriteria, fileOrderCriteria))); }, getRequestUser(crc)); @@ -552,7 +552,7 @@ public Response getVersionFileCounts(@Context ContainerRequestContext crc, 
searchText ); } catch (IllegalArgumentException e) { - return error(Response.Status.BAD_REQUEST, "Invalid access status: " + accessStatus); + return badRequest(BundleUtil.getStringFromBundle("datasets.api.version.files.invalid.access.status", List.of(accessStatus))); } DatasetVersion datasetVersion = getDatasetVersionOrDie(req, versionId, findDatasetOrDie(datasetId), uriInfo, headers); JsonObjectBuilder jsonObjectBuilder = Json.createObjectBuilder(); diff --git a/src/main/java/propertyFiles/Bundle.properties b/src/main/java/propertyFiles/Bundle.properties index 7b4befcca36..3128106d38f 100644 --- a/src/main/java/propertyFiles/Bundle.properties +++ b/src/main/java/propertyFiles/Bundle.properties @@ -2646,6 +2646,8 @@ datasets.api.privateurl.anonymized.error.released=Can't create a URL for anonymi datasets.api.creationdate=Date Created datasets.api.modificationdate=Last Modified Date datasets.api.curationstatus=Curation Status +datasets.api.version.files.invalid.order.criteria=Invalid order criteria: {0} +datasets.api.version.files.invalid.access.status=Invalid access status: {0} #Dataverses.java From 2abb36fc2f24e78ca75ebe0cbfc0a84a1345af26 Mon Sep 17 00:00:00 2001 From: GPortas Date: Fri, 6 Oct 2023 17:00:56 +0100 Subject: [PATCH 375/396] Added: IT for getVersionFileCounts with criteria --- .../harvard/iq/dataverse/api/DatasetsIT.java | 127 +++++++++++++++++- .../edu/harvard/iq/dataverse/api/UtilIT.java | 22 ++- 2 files changed, 143 insertions(+), 6 deletions(-) diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java index 5d1a89aa555..433628685b2 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java @@ -3484,6 +3484,13 @@ public void getVersionFiles() throws IOException, InterruptedException { fileMetadatasCount = getVersionFilesResponsePublic.jsonPath().getList("data").size(); assertEquals(3, 
fileMetadatasCount); + // Test invalid access status + String invalidStatus = "invalidStatus"; + Response getVersionFilesResponseInvalidStatus = UtilIT.getVersionFiles(datasetId, testDatasetVersion, null, null, null, invalidStatus, null, null, null, null, apiToken); + getVersionFilesResponseInvalidStatus.then().assertThat() + .statusCode(BAD_REQUEST.getStatusCode()) + .body("message", equalTo(BundleUtil.getStringFromBundle("datasets.api.version.files.invalid.access.status", List.of(invalidStatus)))); + // Test Search Text Response getVersionFilesResponseSearchText = UtilIT.getVersionFiles(datasetId, testDatasetVersion, null, null, null, null, null, null, "test_1", null, apiToken); @@ -3519,7 +3526,7 @@ public void getVersionFiles() throws IOException, InterruptedException { } @Test - public void getVersionFileCounts() throws IOException { + public void getVersionFileCounts() throws IOException, InterruptedException { Response createUser = UtilIT.createRandomUser(); createUser.then().assertThat().statusCode(OK.getStatusCode()); String apiToken = UtilIT.getApiTokenFromResponse(createUser); @@ -3557,8 +3564,10 @@ public void getVersionFileCounts() throws IOException { Response createFileEmbargoResponse = UtilIT.createFileEmbargo(datasetId, Integer.parseInt(dataFileId), activeEmbargoDate, apiToken); createFileEmbargoResponse.then().assertThat().statusCode(OK.getStatusCode()); - // Getting the file counts and assert each count - Response getVersionFileCountsResponse = UtilIT.getVersionFileCounts(datasetId, ":latest", apiToken); + String testDatasetVersion = ":latest"; + + // Getting the file counts without criteria and assert each count is correct + Response getVersionFileCountsResponse = UtilIT.getVersionFileCounts(datasetId, testDatasetVersion, null, null, null, null, null, apiToken); getVersionFileCountsResponse.then().assertThat().statusCode(OK.getStatusCode()); @@ -3570,8 +3579,120 @@ public void getVersionFileCounts() throws IOException { assertEquals(4, (Integer) 
responseJsonPath.get("data.total")); assertEquals(2, responseCountPerContentTypeMap.get("image/png")); assertEquals(2, responseCountPerContentTypeMap.get("text/plain")); + assertEquals(2, responseCountPerContentTypeMap.size()); + assertEquals(1, responseCountPerCategoryNameMap.get(testCategory)); + assertEquals(2, responseCountPerAccessStatusMap.size()); + assertEquals(3, responseCountPerAccessStatusMap.get(FileSearchCriteria.FileAccessStatus.Public.toString())); + assertEquals(1, responseCountPerAccessStatusMap.get(FileSearchCriteria.FileAccessStatus.EmbargoedThenPublic.toString())); + + // Test content type criteria + getVersionFileCountsResponse = UtilIT.getVersionFileCounts(datasetId, testDatasetVersion, "image/png", null, null, null, null, apiToken); + + getVersionFileCountsResponse.then().assertThat().statusCode(OK.getStatusCode()); + + responseJsonPath = getVersionFileCountsResponse.jsonPath(); + responseCountPerContentTypeMap = responseJsonPath.get("data.perContentType"); + responseCountPerCategoryNameMap = responseJsonPath.get("data.perCategoryName"); + responseCountPerAccessStatusMap = responseJsonPath.get("data.perAccessStatus"); + + assertEquals(2, (Integer) responseJsonPath.get("data.total")); + assertEquals(2, responseCountPerContentTypeMap.get("image/png")); + assertEquals(1, responseCountPerContentTypeMap.size()); + assertEquals(1, responseCountPerCategoryNameMap.size()); assertEquals(1, responseCountPerCategoryNameMap.get(testCategory)); + assertEquals(2, responseCountPerAccessStatusMap.size()); + assertEquals(1, responseCountPerAccessStatusMap.get(FileSearchCriteria.FileAccessStatus.Public.toString())); + assertEquals(1, responseCountPerAccessStatusMap.get(FileSearchCriteria.FileAccessStatus.EmbargoedThenPublic.toString())); + + // Test access status criteria + getVersionFileCountsResponse = UtilIT.getVersionFileCounts(datasetId, testDatasetVersion, null, FileSearchCriteria.FileAccessStatus.Public.toString(), null, null, null, apiToken); + + 
getVersionFileCountsResponse.then().assertThat().statusCode(OK.getStatusCode()); + + responseJsonPath = getVersionFileCountsResponse.jsonPath(); + responseCountPerContentTypeMap = responseJsonPath.get("data.perContentType"); + responseCountPerCategoryNameMap = responseJsonPath.get("data.perCategoryName"); + responseCountPerAccessStatusMap = responseJsonPath.get("data.perAccessStatus"); + + assertEquals(3, (Integer) responseJsonPath.get("data.total")); + assertEquals(1, responseCountPerContentTypeMap.get("image/png")); + assertEquals(2, responseCountPerContentTypeMap.get("text/plain")); + assertEquals(2, responseCountPerContentTypeMap.size()); + assertEquals(0, responseCountPerCategoryNameMap.size()); + assertEquals(1, responseCountPerAccessStatusMap.size()); assertEquals(3, responseCountPerAccessStatusMap.get(FileSearchCriteria.FileAccessStatus.Public.toString())); + + // Test invalid access status + String invalidStatus = "invalidStatus"; + Response getVersionFilesResponseInvalidStatus = UtilIT.getVersionFileCounts(datasetId, testDatasetVersion, null, invalidStatus, null, null, null, apiToken); + getVersionFilesResponseInvalidStatus.then().assertThat() + .statusCode(BAD_REQUEST.getStatusCode()) + .body("message", equalTo(BundleUtil.getStringFromBundle("datasets.api.version.files.invalid.access.status", List.of(invalidStatus)))); + + // Test category name criteria + getVersionFileCountsResponse = UtilIT.getVersionFileCounts(datasetId, testDatasetVersion, null, null, "testCategory", null, null, apiToken); + + getVersionFileCountsResponse.then().assertThat().statusCode(OK.getStatusCode()); + + responseJsonPath = getVersionFileCountsResponse.jsonPath(); + responseCountPerContentTypeMap = responseJsonPath.get("data.perContentType"); + responseCountPerCategoryNameMap = responseJsonPath.get("data.perCategoryName"); + responseCountPerAccessStatusMap = responseJsonPath.get("data.perAccessStatus"); + + assertEquals(1, (Integer) responseJsonPath.get("data.total")); + 
assertEquals(1, responseCountPerContentTypeMap.get("image/png")); + assertEquals(1, responseCountPerContentTypeMap.size()); + assertEquals(1, responseCountPerCategoryNameMap.size()); + assertEquals(1, responseCountPerCategoryNameMap.get(testCategory)); + assertEquals(1, responseCountPerAccessStatusMap.size()); assertEquals(1, responseCountPerAccessStatusMap.get(FileSearchCriteria.FileAccessStatus.EmbargoedThenPublic.toString())); + + // Test search text criteria + getVersionFileCountsResponse = UtilIT.getVersionFileCounts(datasetId, testDatasetVersion, null, null, null, null, "test", apiToken); + + getVersionFileCountsResponse.then().assertThat().statusCode(OK.getStatusCode()); + + responseJsonPath = getVersionFileCountsResponse.jsonPath(); + responseCountPerContentTypeMap = responseJsonPath.get("data.perContentType"); + responseCountPerCategoryNameMap = responseJsonPath.get("data.perCategoryName"); + responseCountPerAccessStatusMap = responseJsonPath.get("data.perAccessStatus"); + + assertEquals(3, (Integer) responseJsonPath.get("data.total")); + assertEquals(1, responseCountPerContentTypeMap.get("image/png")); + assertEquals(2, responseCountPerContentTypeMap.get("text/plain")); + assertEquals(2, responseCountPerContentTypeMap.size()); + assertEquals(0, responseCountPerCategoryNameMap.size()); + assertEquals(1, responseCountPerAccessStatusMap.size()); + assertEquals(3, responseCountPerAccessStatusMap.get(FileSearchCriteria.FileAccessStatus.Public.toString())); + + // Test tabular tag name criteria + String pathToTabularTestFile = "src/test/resources/tab/test.tab"; + Response uploadTabularFileResponse = UtilIT.uploadFileViaNative(Integer.toString(datasetId), pathToTabularTestFile, Json.createObjectBuilder().build(), apiToken); + uploadTabularFileResponse.then().assertThat().statusCode(OK.getStatusCode()); + + String tabularFileId = uploadTabularFileResponse.getBody().jsonPath().getString("data.files[0].dataFile.id"); + + // Ensure tabular file is ingested + 
sleep(2000); + + String tabularTagName = "Survey"; + Response setFileTabularTagsResponse = UtilIT.setFileTabularTags(tabularFileId, apiToken, List.of(tabularTagName)); + setFileTabularTagsResponse.then().assertThat().statusCode(OK.getStatusCode()); + + getVersionFileCountsResponse = UtilIT.getVersionFileCounts(datasetId, testDatasetVersion, null, null, null, tabularTagName, null, apiToken); + + getVersionFileCountsResponse.then().assertThat().statusCode(OK.getStatusCode()); + + responseJsonPath = getVersionFileCountsResponse.jsonPath(); + responseCountPerContentTypeMap = responseJsonPath.get("data.perContentType"); + responseCountPerCategoryNameMap = responseJsonPath.get("data.perCategoryName"); + responseCountPerAccessStatusMap = responseJsonPath.get("data.perAccessStatus"); + + assertEquals(1, (Integer) responseJsonPath.get("data.total")); + assertEquals(1, responseCountPerContentTypeMap.get("text/tab-separated-values")); + assertEquals(1, responseCountPerContentTypeMap.size()); + assertEquals(0, responseCountPerCategoryNameMap.size()); + assertEquals(1, responseCountPerAccessStatusMap.size()); + assertEquals(1, responseCountPerAccessStatusMap.get(FileSearchCriteria.FileAccessStatus.Public.toString())); } } diff --git a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java index 8e333451c8d..6d0f0bfa752 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java @@ -3358,10 +3358,26 @@ static Response createFileEmbargo(Integer datasetId, Integer fileId, String date .post("/api/datasets/" + datasetId + "/files/actions/:set-embargo"); } - static Response getVersionFileCounts(Integer datasetId, String version, String apiToken) { - return given() + static Response getVersionFileCounts(Integer datasetId, String version, String contentType, String accessStatus, String categoryName, String tabularTagName, String searchText, String apiToken) { + 
RequestSpecification requestSpecification = given() .header(API_TOKEN_HTTP_HEADER, apiToken) - .get("/api/datasets/" + datasetId + "/versions/" + version + "/files/counts"); + .contentType("application/json"); + if (contentType != null) { + requestSpecification = requestSpecification.queryParam("contentType", contentType); + } + if (accessStatus != null) { + requestSpecification = requestSpecification.queryParam("accessStatus", accessStatus); + } + if (categoryName != null) { + requestSpecification = requestSpecification.queryParam("categoryName", categoryName); + } + if (tabularTagName != null) { + requestSpecification = requestSpecification.queryParam("tabularTagName", tabularTagName); + } + if (searchText != null) { + requestSpecification = requestSpecification.queryParam("searchText", searchText); + } + return requestSpecification.get("/api/datasets/" + datasetId + "/versions/" + version + "/files/counts"); } static Response setFileCategories(String dataFileId, String apiToken, List categories) { From 65df3d0f4bca41598dcc5cad741779d7d8fd5716 Mon Sep 17 00:00:00 2001 From: GPortas Date: Mon, 9 Oct 2023 09:36:40 +0100 Subject: [PATCH 376/396] Added: count per tabular tag name to getVersionFileCounts API endpoint --- .../DatasetVersionFilesServiceBean.java | 23 +++++++++++++++++++ .../harvard/iq/dataverse/api/Datasets.java | 1 + .../iq/dataverse/util/json/JsonPrinter.java | 8 +++++++ .../harvard/iq/dataverse/api/DatasetsIT.java | 13 +++++++++++ 4 files changed, 45 insertions(+) diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetVersionFilesServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DatasetVersionFilesServiceBean.java index 9afd0513b62..b6b095f58dd 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetVersionFilesServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetVersionFilesServiceBean.java @@ -110,6 +110,29 @@ public Map getFileMetadataCountPerCategoryName(DatasetVersion data return result; } + /** + * Given a 
DatasetVersion, returns its file metadata count per DataFileTag.TagType + * + * @param datasetVersion the DatasetVersion to access + * @param searchCriteria for counting only files matching this criteria + * @return Map of file metadata counts per DataFileTag.TagType + */ + public Map getFileMetadataCountPerTabularTagName(DatasetVersion datasetVersion, FileSearchCriteria searchCriteria) { + JPAQueryFactory queryFactory = new JPAQueryFactory(em); + JPAQuery baseQuery = queryFactory + .select(dataFileTag.type, fileMetadata.count()) + .from(dataFileTag, fileMetadata) + .where(fileMetadata.datasetVersion.id.eq(datasetVersion.getId()).and(fileMetadata.dataFile.dataFileTags.contains(dataFileTag))) + .groupBy(dataFileTag.type); + applyFileSearchCriteriaToQuery(baseQuery, searchCriteria); + List tagNameOccurrences = baseQuery.fetch(); + Map result = new HashMap<>(); + for (Tuple occurrence : tagNameOccurrences) { + result.put(occurrence.get(dataFileTag.type), occurrence.get(fileMetadata.count())); + } + return result; + } + /** * Given a DatasetVersion, returns its file metadata count per FileAccessStatus * diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java index f7a4b1d0d25..26d4dd01cf5 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java @@ -559,6 +559,7 @@ public Response getVersionFileCounts(@Context ContainerRequestContext crc, jsonObjectBuilder.add("total", datasetVersionFilesServiceBean.getFileMetadataCount(datasetVersion, fileSearchCriteria)); jsonObjectBuilder.add("perContentType", json(datasetVersionFilesServiceBean.getFileMetadataCountPerContentType(datasetVersion, fileSearchCriteria))); jsonObjectBuilder.add("perCategoryName", json(datasetVersionFilesServiceBean.getFileMetadataCountPerCategoryName(datasetVersion, fileSearchCriteria))); + jsonObjectBuilder.add("perTabularTagName", 
jsonFileCountPerTabularTagNameMap(datasetVersionFilesServiceBean.getFileMetadataCountPerTabularTagName(datasetVersion, fileSearchCriteria))); jsonObjectBuilder.add("perAccessStatus", jsonFileCountPerAccessStatusMap(datasetVersionFilesServiceBean.getFileMetadataCountPerAccessStatus(datasetVersion, fileSearchCriteria))); return ok(jsonObjectBuilder); }, getRequestUser(crc)); diff --git a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java index 70840c7502f..6fe1ca87028 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java @@ -1115,6 +1115,14 @@ public static JsonObjectBuilder jsonFileCountPerAccessStatusMap(Map map) { + JsonObjectBuilder jsonObjectBuilder = Json.createObjectBuilder(); + for (Map.Entry mapEntry : map.entrySet()) { + jsonObjectBuilder.add(mapEntry.getKey().toString(), mapEntry.getValue()); + } + return jsonObjectBuilder; + } + public static Collector, JsonArrayBuilder> toJsonArray() { return new Collector, JsonArrayBuilder>() { diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java index 433628685b2..53546133b27 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java @@ -3574,6 +3574,7 @@ public void getVersionFileCounts() throws IOException, InterruptedException { JsonPath responseJsonPath = getVersionFileCountsResponse.jsonPath(); LinkedHashMap responseCountPerContentTypeMap = responseJsonPath.get("data.perContentType"); LinkedHashMap responseCountPerCategoryNameMap = responseJsonPath.get("data.perCategoryName"); + LinkedHashMap responseCountPerTabularTagNameMap = responseJsonPath.get("data.perTabularTagName"); LinkedHashMap responseCountPerAccessStatusMap = responseJsonPath.get("data.perAccessStatus"); assertEquals(4, 
(Integer) responseJsonPath.get("data.total")); @@ -3581,6 +3582,7 @@ public void getVersionFileCounts() throws IOException, InterruptedException { assertEquals(2, responseCountPerContentTypeMap.get("text/plain")); assertEquals(2, responseCountPerContentTypeMap.size()); assertEquals(1, responseCountPerCategoryNameMap.get(testCategory)); + assertEquals(0, responseCountPerTabularTagNameMap.size()); assertEquals(2, responseCountPerAccessStatusMap.size()); assertEquals(3, responseCountPerAccessStatusMap.get(FileSearchCriteria.FileAccessStatus.Public.toString())); assertEquals(1, responseCountPerAccessStatusMap.get(FileSearchCriteria.FileAccessStatus.EmbargoedThenPublic.toString())); @@ -3593,6 +3595,7 @@ public void getVersionFileCounts() throws IOException, InterruptedException { responseJsonPath = getVersionFileCountsResponse.jsonPath(); responseCountPerContentTypeMap = responseJsonPath.get("data.perContentType"); responseCountPerCategoryNameMap = responseJsonPath.get("data.perCategoryName"); + responseCountPerTabularTagNameMap = responseJsonPath.get("data.perTabularTagName"); responseCountPerAccessStatusMap = responseJsonPath.get("data.perAccessStatus"); assertEquals(2, (Integer) responseJsonPath.get("data.total")); @@ -3600,6 +3603,7 @@ public void getVersionFileCounts() throws IOException, InterruptedException { assertEquals(1, responseCountPerContentTypeMap.size()); assertEquals(1, responseCountPerCategoryNameMap.size()); assertEquals(1, responseCountPerCategoryNameMap.get(testCategory)); + assertEquals(0, responseCountPerTabularTagNameMap.size()); assertEquals(2, responseCountPerAccessStatusMap.size()); assertEquals(1, responseCountPerAccessStatusMap.get(FileSearchCriteria.FileAccessStatus.Public.toString())); assertEquals(1, responseCountPerAccessStatusMap.get(FileSearchCriteria.FileAccessStatus.EmbargoedThenPublic.toString())); @@ -3612,6 +3616,7 @@ public void getVersionFileCounts() throws IOException, InterruptedException { responseJsonPath = 
getVersionFileCountsResponse.jsonPath(); responseCountPerContentTypeMap = responseJsonPath.get("data.perContentType"); responseCountPerCategoryNameMap = responseJsonPath.get("data.perCategoryName"); + responseCountPerTabularTagNameMap = responseJsonPath.get("data.perTabularTagName"); responseCountPerAccessStatusMap = responseJsonPath.get("data.perAccessStatus"); assertEquals(3, (Integer) responseJsonPath.get("data.total")); @@ -3619,6 +3624,7 @@ public void getVersionFileCounts() throws IOException, InterruptedException { assertEquals(2, responseCountPerContentTypeMap.get("text/plain")); assertEquals(2, responseCountPerContentTypeMap.size()); assertEquals(0, responseCountPerCategoryNameMap.size()); + assertEquals(0, responseCountPerTabularTagNameMap.size()); assertEquals(1, responseCountPerAccessStatusMap.size()); assertEquals(3, responseCountPerAccessStatusMap.get(FileSearchCriteria.FileAccessStatus.Public.toString())); @@ -3637,6 +3643,7 @@ public void getVersionFileCounts() throws IOException, InterruptedException { responseJsonPath = getVersionFileCountsResponse.jsonPath(); responseCountPerContentTypeMap = responseJsonPath.get("data.perContentType"); responseCountPerCategoryNameMap = responseJsonPath.get("data.perCategoryName"); + responseCountPerTabularTagNameMap = responseJsonPath.get("data.perTabularTagName"); responseCountPerAccessStatusMap = responseJsonPath.get("data.perAccessStatus"); assertEquals(1, (Integer) responseJsonPath.get("data.total")); @@ -3644,6 +3651,7 @@ public void getVersionFileCounts() throws IOException, InterruptedException { assertEquals(1, responseCountPerContentTypeMap.size()); assertEquals(1, responseCountPerCategoryNameMap.size()); assertEquals(1, responseCountPerCategoryNameMap.get(testCategory)); + assertEquals(0, responseCountPerTabularTagNameMap.size()); assertEquals(1, responseCountPerAccessStatusMap.size()); assertEquals(1, 
responseCountPerAccessStatusMap.get(FileSearchCriteria.FileAccessStatus.EmbargoedThenPublic.toString())); @@ -3655,6 +3663,7 @@ public void getVersionFileCounts() throws IOException, InterruptedException { responseJsonPath = getVersionFileCountsResponse.jsonPath(); responseCountPerContentTypeMap = responseJsonPath.get("data.perContentType"); responseCountPerCategoryNameMap = responseJsonPath.get("data.perCategoryName"); + responseCountPerTabularTagNameMap = responseJsonPath.get("data.perTabularTagName"); responseCountPerAccessStatusMap = responseJsonPath.get("data.perAccessStatus"); assertEquals(3, (Integer) responseJsonPath.get("data.total")); @@ -3662,6 +3671,7 @@ public void getVersionFileCounts() throws IOException, InterruptedException { assertEquals(2, responseCountPerContentTypeMap.get("text/plain")); assertEquals(2, responseCountPerContentTypeMap.size()); assertEquals(0, responseCountPerCategoryNameMap.size()); + assertEquals(0, responseCountPerTabularTagNameMap.size()); assertEquals(1, responseCountPerAccessStatusMap.size()); assertEquals(3, responseCountPerAccessStatusMap.get(FileSearchCriteria.FileAccessStatus.Public.toString())); @@ -3686,12 +3696,15 @@ public void getVersionFileCounts() throws IOException, InterruptedException { responseJsonPath = getVersionFileCountsResponse.jsonPath(); responseCountPerContentTypeMap = responseJsonPath.get("data.perContentType"); responseCountPerCategoryNameMap = responseJsonPath.get("data.perCategoryName"); + responseCountPerTabularTagNameMap = responseJsonPath.get("data.perTabularTagName"); responseCountPerAccessStatusMap = responseJsonPath.get("data.perAccessStatus"); assertEquals(1, (Integer) responseJsonPath.get("data.total")); assertEquals(1, responseCountPerContentTypeMap.get("text/tab-separated-values")); assertEquals(1, responseCountPerContentTypeMap.size()); assertEquals(0, responseCountPerCategoryNameMap.size()); + assertEquals(1, responseCountPerTabularTagNameMap.size()); + assertEquals(1, 
responseCountPerTabularTagNameMap.get(tabularTagName)); assertEquals(1, responseCountPerAccessStatusMap.size()); assertEquals(1, responseCountPerAccessStatusMap.get(FileSearchCriteria.FileAccessStatus.Public.toString())); } From 98a444c2108395fc562e0159d554ce1f9968686e Mon Sep 17 00:00:00 2001 From: GPortas Date: Mon, 9 Oct 2023 09:45:15 +0100 Subject: [PATCH 377/396] Added: docs for extended getVersionFileCounts endpoint --- doc/sphinx-guides/source/api/native-api.rst | 52 +++++++++++++++++++++ 1 file changed, 52 insertions(+) diff --git a/doc/sphinx-guides/source/api/native-api.rst b/doc/sphinx-guides/source/api/native-api.rst index 97b41ffa98a..f05c4d42073 100644 --- a/doc/sphinx-guides/source/api/native-api.rst +++ b/doc/sphinx-guides/source/api/native-api.rst @@ -1046,6 +1046,7 @@ The returned file counts are based on different criteria: - Total (The total file count) - Per content type - Per category name +- Per tabular tag name - Per access status (Possible values: Public, Restricted, EmbargoedThenRestricted, EmbargoedThenPublic) .. code-block:: bash @@ -1062,6 +1063,57 @@ The fully expanded example above (without environment variables) looks like this curl "https://demo.dataverse.org/api/datasets/24/versions/1.0/files/counts" +Category name filtering is optionally supported. To return counts only for files to which the requested category has been added. + +Usage example: + +.. code-block:: bash + + curl "https://demo.dataverse.org/api/datasets/24/versions/1.0/files/counts?categoryName=Data" + +Tabular tag name filtering is also optionally supported. To return counts only for files to which the requested tabular tag has been added. + +Usage example: + +.. code-block:: bash + + curl "https://demo.dataverse.org/api/datasets/24/versions/1.0/files/counts?tabularTagName=Survey" + +Content type filtering is also optionally supported. To return counts only for files matching the requested content type. + +Usage example: + +.. 
code-block:: bash + + curl "https://demo.dataverse.org/api/datasets/24/versions/1.0/files/counts?contentType=image/png" + +Filtering by search text is also optionally supported. The search will be applied to the labels and descriptions of the dataset files, to return counts only for files that contain the text searched in one of such fields. + +Usage example: + +.. code-block:: bash + + curl "https://demo.dataverse.org/api/datasets/24/versions/1.0/files/counts?searchText=word" + +File access filtering is also optionally supported. In particular, by the following possible values: + +* ``Public`` +* ``Restricted`` +* ``EmbargoedThenRestricted`` +* ``EmbargoedThenPublic`` + +If no filter is specified, the files will match all of the above categories. + +Usage example: + +.. code-block:: bash + + curl "https://demo.dataverse.org/api/datasets/24/versions/1.0/files/counts?accessStatus=Public" + +Please note that filtering values are case sensitive and must be correctly typed for the endpoint to recognize them. + +Keep in mind that you can combine all of the above query params depending on the results you are looking for. + View Dataset Files and Folders as a Directory Index ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ From 7d0501cdc2982e591d99eab29b9569d2880ebf30 Mon Sep 17 00:00:00 2001 From: GPortas Date: Mon, 9 Oct 2023 09:50:30 +0100 Subject: [PATCH 378/396] Added: #9907 release notes --- .../9907-files-api-counts-with-criteria.md | 11 +++++++++++ 1 file changed, 11 insertions(+) create mode 100644 doc/release-notes/9907-files-api-counts-with-criteria.md diff --git a/doc/release-notes/9907-files-api-counts-with-criteria.md b/doc/release-notes/9907-files-api-counts-with-criteria.md new file mode 100644 index 00000000000..07cd23daad0 --- /dev/null +++ b/doc/release-notes/9907-files-api-counts-with-criteria.md @@ -0,0 +1,11 @@ +Extended the getVersionFileCounts endpoint (/api/datasets/{id}/versions/{versionId}/files/counts) to support filtering by criteria. 
+ +In particular, the endpoint now accepts the following optional criteria query parameters: + +- contentType +- accessStatus +- categoryName +- tabularTagName +- searchText + +This filtering criteria is the same as the one for the getVersionFiles endpoint. From 35eeed53cefe427df8684ca8c20046be2b2a45f2 Mon Sep 17 00:00:00 2001 From: GPortas Date: Mon, 9 Oct 2023 10:07:53 +0100 Subject: [PATCH 379/396] Refactor: using variable instead of repeated string in IT --- src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java index 53546133b27..06d0bed14c0 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java @@ -3636,7 +3636,7 @@ public void getVersionFileCounts() throws IOException, InterruptedException { .body("message", equalTo(BundleUtil.getStringFromBundle("datasets.api.version.files.invalid.access.status", List.of(invalidStatus)))); // Test category name criteria - getVersionFileCountsResponse = UtilIT.getVersionFileCounts(datasetId, testDatasetVersion, null, null, "testCategory", null, null, apiToken); + getVersionFileCountsResponse = UtilIT.getVersionFileCounts(datasetId, testDatasetVersion, null, null, testCategory, null, null, apiToken); getVersionFileCountsResponse.then().assertThat().statusCode(OK.getStatusCode()); From 1e8b735ca1baba7c12bac0737cfc88eedc084ec3 Mon Sep 17 00:00:00 2001 From: mr-loop-1 Date: Mon, 9 Oct 2023 14:47:26 +0530 Subject: [PATCH 380/396] #9412 added markdown in external tools guide --- .../source/_static/admin/dataverse-external-tools.tsv | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/sphinx-guides/source/_static/admin/dataverse-external-tools.tsv b/doc/sphinx-guides/source/_static/admin/dataverse-external-tools.tsv index 8543300dd2c..f8bf5fc73d9 
100644 --- a/doc/sphinx-guides/source/_static/admin/dataverse-external-tools.tsv +++ b/doc/sphinx-guides/source/_static/admin/dataverse-external-tools.tsv @@ -2,6 +2,6 @@ Tool Type Scope Description Data Explorer explore file "A GUI which lists the variables in a tabular data file allowing searching, charting and cross tabulation analysis. See the README.md file at https://github.com/scholarsportal/dataverse-data-explorer-v2 for the instructions on adding Data Explorer to your Dataverse." Whole Tale explore dataset "A platform for the creation of reproducible research packages that allows users to launch containerized interactive analysis environments based on popular tools such as Jupyter and RStudio. Using this integration, Dataverse users can launch Jupyter and RStudio environments to analyze published datasets. For more information, see the `Whole Tale User Guide `_." Binder explore dataset Binder allows you to spin up custom computing environments in the cloud (including Jupyter notebooks) with the files from your dataset. `Installation instructions `_ are in the Data Exploration Lab girder_ythub project. -File Previewers explore file "A set of tools that display the content of files - including audio, html, `Hypothes.is `_ annotations, images, PDF, text, video, tabular data, spreadsheets, GeoJSON, zip, and NcML files - allowing them to be viewed without downloading the file. The previewers can be run directly from github.io, so the only required step is using the Dataverse API to register the ones you want to use. Documentation, including how to optionally brand the previewers, and an invitation to contribute through github are in the README.md file. Initial development was led by the Qualitative Data Repository and the spreasdheet previewer was added by the Social Sciences and Humanities Open Cloud (SSHOC) project. 
https://github.com/gdcc/dataverse-previewers" +File Previewers explore file "A set of tools that display the content of files - including audio, html, `Hypothes.is `_ annotations, images, PDF, markdown (md), text, video, tabular data, spreadsheets, GeoJSON, zip, and NcML files - allowing them to be viewed without downloading the file. The previewers can be run directly from github.io, so the only required step is using the Dataverse API to register the ones you want to use. Documentation, including how to optionally brand the previewers, and an invitation to contribute through github are in the README.md file. Initial development was led by the Qualitative Data Repository and the spreasdheet previewer was added by the Social Sciences and Humanities Open Cloud (SSHOC) project. https://github.com/gdcc/dataverse-previewers" Data Curation Tool configure file "A GUI for curating data by adding labels, groups, weights and other details to assist with informed reuse. See the README.md file at https://github.com/scholarsportal/Dataverse-Data-Curation-Tool for the installation instructions." Ask the Data query file Ask the Data is an experimental tool that allows you ask natural language questions about the data contained in Dataverse tables (tabular data). See the README.md file at https://github.com/IQSS/askdataverse/tree/main/askthedata for the instructions on adding Ask the Data to your Dataverse installation. 
From b32d51fab1b78ff9316476f76c549163c4b3e7ba Mon Sep 17 00:00:00 2001 From: mr-loop-1 Date: Mon, 9 Oct 2023 14:50:28 +0530 Subject: [PATCH 381/396] #9412 added markdown to gile previews list --- doc/sphinx-guides/source/user/dataset-management.rst | 1 + 1 file changed, 1 insertion(+) diff --git a/doc/sphinx-guides/source/user/dataset-management.rst b/doc/sphinx-guides/source/user/dataset-management.rst index 3b5b4ec6ba8..c41ca40dd36 100755 --- a/doc/sphinx-guides/source/user/dataset-management.rst +++ b/doc/sphinx-guides/source/user/dataset-management.rst @@ -200,6 +200,7 @@ Previewers are available for the following file types: - Text - PDF +- Markdown (MD) - Tabular (CSV, Excel, etc., see :doc:`tabulardataingest/index`) - Code (R, etc.) - Images (PNG, GIF, JPG) From dea8bf7636bc396d51773bc481135dab5f1a7679 Mon Sep 17 00:00:00 2001 From: mr-loop-1 Date: Mon, 9 Oct 2023 14:51:13 +0530 Subject: [PATCH 382/396] #9412 removed file extension markdown --- .../source/_static/admin/dataverse-external-tools.tsv | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/sphinx-guides/source/_static/admin/dataverse-external-tools.tsv b/doc/sphinx-guides/source/_static/admin/dataverse-external-tools.tsv index f8bf5fc73d9..a13dea923e4 100644 --- a/doc/sphinx-guides/source/_static/admin/dataverse-external-tools.tsv +++ b/doc/sphinx-guides/source/_static/admin/dataverse-external-tools.tsv @@ -2,6 +2,6 @@ Tool Type Scope Description Data Explorer explore file "A GUI which lists the variables in a tabular data file allowing searching, charting and cross tabulation analysis. See the README.md file at https://github.com/scholarsportal/dataverse-data-explorer-v2 for the instructions on adding Data Explorer to your Dataverse." Whole Tale explore dataset "A platform for the creation of reproducible research packages that allows users to launch containerized interactive analysis environments based on popular tools such as Jupyter and RStudio. 
Using this integration, Dataverse users can launch Jupyter and RStudio environments to analyze published datasets. For more information, see the `Whole Tale User Guide `_." Binder explore dataset Binder allows you to spin up custom computing environments in the cloud (including Jupyter notebooks) with the files from your dataset. `Installation instructions `_ are in the Data Exploration Lab girder_ythub project. -File Previewers explore file "A set of tools that display the content of files - including audio, html, `Hypothes.is `_ annotations, images, PDF, markdown (md), text, video, tabular data, spreadsheets, GeoJSON, zip, and NcML files - allowing them to be viewed without downloading the file. The previewers can be run directly from github.io, so the only required step is using the Dataverse API to register the ones you want to use. Documentation, including how to optionally brand the previewers, and an invitation to contribute through github are in the README.md file. Initial development was led by the Qualitative Data Repository and the spreasdheet previewer was added by the Social Sciences and Humanities Open Cloud (SSHOC) project. https://github.com/gdcc/dataverse-previewers" +File Previewers explore file "A set of tools that display the content of files - including audio, html, `Hypothes.is `_ annotations, images, PDF, markdown, text, video, tabular data, spreadsheets, GeoJSON, zip, and NcML files - allowing them to be viewed without downloading the file. The previewers can be run directly from github.io, so the only required step is using the Dataverse API to register the ones you want to use. Documentation, including how to optionally brand the previewers, and an invitation to contribute through github are in the README.md file. Initial development was led by the Qualitative Data Repository and the spreasdheet previewer was added by the Social Sciences and Humanities Open Cloud (SSHOC) project. 
https://github.com/gdcc/dataverse-previewers" Data Curation Tool configure file "A GUI for curating data by adding labels, groups, weights and other details to assist with informed reuse. See the README.md file at https://github.com/scholarsportal/Dataverse-Data-Curation-Tool for the installation instructions." Ask the Data query file Ask the Data is an experimental tool that allows you ask natural language questions about the data contained in Dataverse tables (tabular data). See the README.md file at https://github.com/IQSS/askdataverse/tree/main/askthedata for the instructions on adding Ask the Data to your Dataverse installation. From 5a3b7853607d0a995ad9cbdbbcf402114f2a70b8 Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Mon, 9 Oct 2023 08:53:43 -0400 Subject: [PATCH 383/396] add release note for markdown previewer #9412 --- doc/release-notes/9412-markdown-previewer.md | 1 + 1 file changed, 1 insertion(+) create mode 100644 doc/release-notes/9412-markdown-previewer.md diff --git a/doc/release-notes/9412-markdown-previewer.md b/doc/release-notes/9412-markdown-previewer.md new file mode 100644 index 00000000000..8faa2679fb0 --- /dev/null +++ b/doc/release-notes/9412-markdown-previewer.md @@ -0,0 +1 @@ +There is now a Markdown (.md) previewer: https://dataverse-guide--9986.org.readthedocs.build/en/9986/user/dataset-management.html#file-previews From cc117bd4396e18f5680f34488928bb7a009b8bf0 Mon Sep 17 00:00:00 2001 From: Abdul Samad <62374784+mr-loop-1@users.noreply.github.com> Date: Mon, 9 Oct 2023 19:03:00 +0530 Subject: [PATCH 384/396] remove extension after markdown Co-authored-by: Philip Durbin --- doc/sphinx-guides/source/user/dataset-management.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/sphinx-guides/source/user/dataset-management.rst b/doc/sphinx-guides/source/user/dataset-management.rst index c41ca40dd36..1e8ea897032 100755 --- a/doc/sphinx-guides/source/user/dataset-management.rst +++ 
b/doc/sphinx-guides/source/user/dataset-management.rst @@ -200,7 +200,7 @@ Previewers are available for the following file types: - Text - PDF -- Markdown (MD) +- Markdown - Tabular (CSV, Excel, etc., see :doc:`tabulardataingest/index`) - Code (R, etc.) - Images (PNG, GIF, JPG) From 44b015f375fc92505def8ef2e2475950a3818d4e Mon Sep 17 00:00:00 2001 From: Abdul Samad <62374784+mr-loop-1@users.noreply.github.com> Date: Mon, 9 Oct 2023 19:03:32 +0530 Subject: [PATCH 385/396] Capitalise Markdown Co-authored-by: Philip Durbin --- .../source/_static/admin/dataverse-external-tools.tsv | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/sphinx-guides/source/_static/admin/dataverse-external-tools.tsv b/doc/sphinx-guides/source/_static/admin/dataverse-external-tools.tsv index a13dea923e4..4f4c29d0670 100644 --- a/doc/sphinx-guides/source/_static/admin/dataverse-external-tools.tsv +++ b/doc/sphinx-guides/source/_static/admin/dataverse-external-tools.tsv @@ -2,6 +2,6 @@ Tool Type Scope Description Data Explorer explore file "A GUI which lists the variables in a tabular data file allowing searching, charting and cross tabulation analysis. See the README.md file at https://github.com/scholarsportal/dataverse-data-explorer-v2 for the instructions on adding Data Explorer to your Dataverse." Whole Tale explore dataset "A platform for the creation of reproducible research packages that allows users to launch containerized interactive analysis environments based on popular tools such as Jupyter and RStudio. Using this integration, Dataverse users can launch Jupyter and RStudio environments to analyze published datasets. For more information, see the `Whole Tale User Guide `_." Binder explore dataset Binder allows you to spin up custom computing environments in the cloud (including Jupyter notebooks) with the files from your dataset. `Installation instructions `_ are in the Data Exploration Lab girder_ythub project. 
-File Previewers explore file "A set of tools that display the content of files - including audio, html, `Hypothes.is `_ annotations, images, PDF, markdown, text, video, tabular data, spreadsheets, GeoJSON, zip, and NcML files - allowing them to be viewed without downloading the file. The previewers can be run directly from github.io, so the only required step is using the Dataverse API to register the ones you want to use. Documentation, including how to optionally brand the previewers, and an invitation to contribute through github are in the README.md file. Initial development was led by the Qualitative Data Repository and the spreasdheet previewer was added by the Social Sciences and Humanities Open Cloud (SSHOC) project. https://github.com/gdcc/dataverse-previewers" +File Previewers explore file "A set of tools that display the content of files - including audio, html, `Hypothes.is `_ annotations, images, PDF, Markdown, text, video, tabular data, spreadsheets, GeoJSON, zip, and NcML files - allowing them to be viewed without downloading the file. The previewers can be run directly from github.io, so the only required step is using the Dataverse API to register the ones you want to use. Documentation, including how to optionally brand the previewers, and an invitation to contribute through github are in the README.md file. Initial development was led by the Qualitative Data Repository and the spreasdheet previewer was added by the Social Sciences and Humanities Open Cloud (SSHOC) project. https://github.com/gdcc/dataverse-previewers" Data Curation Tool configure file "A GUI for curating data by adding labels, groups, weights and other details to assist with informed reuse. See the README.md file at https://github.com/scholarsportal/Dataverse-Data-Curation-Tool for the installation instructions." Ask the Data query file Ask the Data is an experimental tool that allows you ask natural language questions about the data contained in Dataverse tables (tabular data). 
See the README.md file at https://github.com/IQSS/askdataverse/tree/main/askthedata for the instructions on adding Ask the Data to your Dataverse installation. From e1acdd328fa4a6ca6624522e21806c7d2a779ef9 Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Tue, 10 Oct 2023 14:49:09 +0200 Subject: [PATCH 386/396] test(oidc): deactivate test when no Docker available #9974 As many of IQSS and external devs might not have Docker available, let's deactivate any Testcontainers tests in these cases. --- .../oauth2/oidc/OIDCAuthenticationProviderFactoryIT.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/test/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/oidc/OIDCAuthenticationProviderFactoryIT.java b/src/test/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/oidc/OIDCAuthenticationProviderFactoryIT.java index 5968cf3eaeb..ee6823ef98a 100644 --- a/src/test/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/oidc/OIDCAuthenticationProviderFactoryIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/oidc/OIDCAuthenticationProviderFactoryIT.java @@ -54,7 +54,7 @@ @Tag(Tags.INTEGRATION_TEST) @Tag(Tags.USES_TESTCONTAINERS) -@Testcontainers +@Testcontainers(disabledWithoutDocker = true) @ExtendWith(MockitoExtension.class) // NOTE: order is important here - Testcontainers must be first, otherwise it's not ready when we call getAuthUrl() @LocalJvmSettings From 2aa7a471249cb129aeef13d6301f10ddb43506b7 Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Tue, 10 Oct 2023 14:52:43 +0200 Subject: [PATCH 387/396] doc(testing): change docs for TC ITs to disable when no Docker #9974 --- doc/sphinx-guides/source/developers/testing.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/sphinx-guides/source/developers/testing.rst b/doc/sphinx-guides/source/developers/testing.rst index 4691aca3aad..dab8110b20b 100755 --- a/doc/sphinx-guides/source/developers/testing.rst +++ 
b/doc/sphinx-guides/source/developers/testing.rst @@ -316,7 +316,7 @@ Please make sure to: .. code:: java /** A very minimal example for a Testcontainers integration test class. */ - @Testcontainers + @Testcontainers(disabledWithoutDocker = true) @Tag(edu.harvard.iq.dataverse.util.testing.Tags.INTEGRATION_TEST) @Tag(edu.harvard.iq.dataverse.util.testing.Tags.USES_TESTCONTAINERS) class MyExampleIT { /* ... */ } From ed291936810a46e260df9809def80b2d2c5b50dc Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Tue, 10 Oct 2023 20:04:11 +0200 Subject: [PATCH 388/396] style(ct): remove empty lines from configbaker Dockerfile --- modules/container-configbaker/Dockerfile | 2 -- 1 file changed, 2 deletions(-) diff --git a/modules/container-configbaker/Dockerfile b/modules/container-configbaker/Dockerfile index 2975b043213..9b98334d72b 100644 --- a/modules/container-configbaker/Dockerfile +++ b/modules/container-configbaker/Dockerfile @@ -40,8 +40,6 @@ COPY maven/solr/*.xml ${SOLR_TEMPLATE}/conf/ RUN rm ${SOLR_TEMPLATE}/conf/managed-schema.xml - - # Set the entrypoint to tini (as a process supervisor) ENTRYPOINT ["/usr/bin/dumb-init", "--"] # By default run a script that will print a help message and terminate From e89e2aaeb32f983462ea11b64eceab6ddc926eb7 Mon Sep 17 00:00:00 2001 From: Stephen Kraffmiller Date: Wed, 11 Oct 2023 10:47:00 -0400 Subject: [PATCH 389/396] #9507 revert to use dataverse in bundle --- src/main/java/propertyFiles/Bundle.properties | 26 +++++++++---------- 1 file changed, 13 insertions(+), 13 deletions(-) diff --git a/src/main/java/propertyFiles/Bundle.properties b/src/main/java/propertyFiles/Bundle.properties index 4964dac78a4..e3dbdc144f1 100644 --- a/src/main/java/propertyFiles/Bundle.properties +++ b/src/main/java/propertyFiles/Bundle.properties @@ -875,22 +875,22 @@ dataverse.nopublished=No Published Dataverses dataverse.nopublished.tip=In order to use this feature you must have at least one published dataverse. 
dataverse.contact=Email Dataverse Contact dataverse.link=Link Collection -dataverse.link.btn.tip=Link to Your Collection -dataverse.link.yourDataverses=Your Collection -dataverse.link.yourDataverses.inputPlaceholder=Enter Collection Name -dataverse.link.save=Save Linked collection -dataverse.link.dataverse.choose=Choose which of your collection you would like to link this collection to. -dataverse.link.dataset.choose=Enter the name of the collection you would like to link this dataset to. If you need to remove this link in the future, please contact {0}. -dataverse.link.dataset.none=No linkable collections available. -dataverse.link.no.choice=You have one collection you can add linked collection and datasets in. -dataverse.link.no.linkable=To be able to link a collection or dataset, you need to have your own collection. Create a collection to get started. -dataverse.link.no.linkable.remaining=You have already linked all of your eligible collections. +dataverse.link.btn.tip=Link to Your Dataverse +dataverse.link.yourDataverses=Your Dataverse +dataverse.link.yourDataverses.inputPlaceholder=Enter Dataverse Name +dataverse.link.save=Save Linked Dataverse +dataverse.link.dataverse.choose=Choose which of your dataverses you would like to link this dataverse to. +dataverse.link.dataset.choose=Enter the name of the dataverse you would like to link this dataset to. If you need to remove this link in the future, please contact {0}. +dataverse.link.dataset.none=No linkable dataverses available. +dataverse.link.no.choice=You have one dataverse you can add linked dataverses and datasets in. +dataverse.link.no.linkable=To be able to link a dataverse or dataset, you need to have your own dataverse. Create a dataverse to get started. +dataverse.link.no.linkable.remaining=You have already linked all of your eligible dataverses. 
dataverse.savedsearch.link=Link Search dataverse.savedsearch.searchquery=Search dataverse.savedsearch.filterQueries=Facets dataverse.savedsearch.save=Save Linked Search -dataverse.savedsearch.dataverse.choose=Choose which of your collection you would like to link this search to. -dataverse.savedsearch.no.choice=You have one collection to which you may add a saved search. +dataverse.savedsearch.dataverse.choose=Choose which of your dataverses you would like to link this search to. +dataverse.savedsearch.no.choice=You have one dataverse to which you may add a saved search. # Bundle file editors, please note that "dataverse.savedsearch.save.success" is used in a unit test dataverse.saved.search.success=The saved search has been successfully linked to {0}. dataverse.saved.search.failure=The saved search was not able to be linked. @@ -2498,7 +2498,7 @@ dataset.registered=DatasetRegistered dataset.registered.msg=Your dataset is now registered. dataset.notlinked=DatasetNotLinked dataset.notlinked.msg=There was a problem linking this dataset to yours: -dataset.linking.popop.already.linked.note=Note: This dataset is already linked to the following collection(s): +dataset.linking.popop.already.linked.note=Note: This dataset is already linked to the following dataverse(s): datasetversion.archive.success=Archival copy of Version successfully submitted datasetversion.archive.failure=Error in submitting an archival copy datasetversion.update.failure=Dataset Version Update failed. Changes are still in the DRAFT version. 
From 617f36cd98b267bc99d53a7b69c21d96974ff4dc Mon Sep 17 00:00:00 2001 From: Stephen Kraffmiller Date: Wed, 11 Oct 2023 10:48:11 -0400 Subject: [PATCH 390/396] #9507 missed one --- src/main/java/propertyFiles/Bundle.properties | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/main/java/propertyFiles/Bundle.properties b/src/main/java/propertyFiles/Bundle.properties index e3dbdc144f1..89eabaeb0bf 100644 --- a/src/main/java/propertyFiles/Bundle.properties +++ b/src/main/java/propertyFiles/Bundle.properties @@ -874,7 +874,7 @@ dataverse.publish.header=Publish Dataverse dataverse.nopublished=No Published Dataverses dataverse.nopublished.tip=In order to use this feature you must have at least one published dataverse. dataverse.contact=Email Dataverse Contact -dataverse.link=Link Collection +dataverse.link=Link Dataverse dataverse.link.btn.tip=Link to Your Dataverse dataverse.link.yourDataverses=Your Dataverse dataverse.link.yourDataverses.inputPlaceholder=Enter Dataverse Name From ada8cc7a713c8074378c7732d4cf30688d50f9cf Mon Sep 17 00:00:00 2001 From: GPortas Date: Fri, 13 Oct 2023 10:44:14 +0100 Subject: [PATCH 391/396] Fixed: curl examples in docs for deaccession dataset --- doc/sphinx-guides/source/api/native-api.rst | 16 +++++++++++++--- 1 file changed, 13 insertions(+), 3 deletions(-) diff --git a/doc/sphinx-guides/source/api/native-api.rst b/doc/sphinx-guides/source/api/native-api.rst index e51ca0055b6..1dc1ab13d9f 100644 --- a/doc/sphinx-guides/source/api/native-api.rst +++ b/doc/sphinx-guides/source/api/native-api.rst @@ -1383,21 +1383,31 @@ Deaccession Dataset Given a version of a dataset, updates its status to deaccessioned. +The JSON body required to deaccession a dataset (``deaccession.json``) looks like this:: + + { + "deaccessionReason": "Description of the deaccession reason.", + "deaccessionForwardURL": "https://demo.dataverse.org" + } + + +Note that the field ``deaccessionForwardURL`` is optional. + .. 
code-block:: bash export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx export SERVER_URL=https://demo.dataverse.org export ID=24 export VERSIONID=1.0 - export JSON='{"deaccessionReason":"Description of the deaccession reason.", "deaccessionForwardURL":"https://demo.dataverse.org"}' + export FILE_PATH=deaccession.json - curl -H "X-Dataverse-key:$API_TOKEN" -X POST "$SERVER_URL/api/datasets/$ID/versions/$VERSIONID/deaccession" -d "$JSON" + curl -H "X-Dataverse-key:$API_TOKEN" -X POST "$SERVER_URL/api/datasets/$ID/versions/$VERSIONID/deaccession" -H "Content-type:application/json" --upload-file $FILE_PATH The fully expanded example above (without environment variables) looks like this: .. code-block:: bash - curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X POST "https://demo.dataverse.org/api/datasets/24/versions/1.0/deaccession" -d '{"deaccessionReason":"Description of the deaccession reason.", "deaccessionForwardURL":"https://demo.dataverse.org"}' + curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X POST "https://demo.dataverse.org/api/datasets/24/versions/1.0/deaccession" -H "Content-type:application/json" --upload-file deaccession.json .. note:: You cannot deaccession a dataset more than once. If you call this endpoint twice for the same dataset version, you will get a not found error on the second call, since the dataset you are looking for will no longer be published since it is already deaccessioned. 
From 1f0efddbd6cb4e10b7f5924dbd338105f18add81 Mon Sep 17 00:00:00 2001 From: GPortas Date: Fri, 13 Oct 2023 11:35:44 +0100 Subject: [PATCH 392/396] Fixed: permission checks in GetSpecificPublishedDatasetVersionCommand --- ...etSpecificPublishedDatasetVersionCommand.java | 3 ++- .../edu/harvard/iq/dataverse/api/DatasetsIT.java | 16 ++++++++++++++++ .../edu/harvard/iq/dataverse/api/UtilIT.java | 14 +++++++++----- 3 files changed, 27 insertions(+), 6 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetSpecificPublishedDatasetVersionCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetSpecificPublishedDatasetVersionCommand.java index 879a694ef57..a87eb8a99a5 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetSpecificPublishedDatasetVersionCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetSpecificPublishedDatasetVersionCommand.java @@ -8,6 +8,7 @@ import edu.harvard.iq.dataverse.Dataset; import edu.harvard.iq.dataverse.DatasetVersion; +import edu.harvard.iq.dataverse.authorization.Permission; import edu.harvard.iq.dataverse.engine.command.AbstractCommand; import edu.harvard.iq.dataverse.engine.command.CommandContext; import edu.harvard.iq.dataverse.engine.command.DataverseRequest; @@ -40,7 +41,7 @@ public GetSpecificPublishedDatasetVersionCommand(DataverseRequest aRequest, Data @Override public DatasetVersion execute(CommandContext ctxt) throws CommandException { for (DatasetVersion dsv : ds.getVersions()) { - if (dsv.isReleased() || (includeDeaccessioned && dsv.isDeaccessioned())) { + if (dsv.isReleased() || (includeDeaccessioned && dsv.isDeaccessioned() && ctxt.permissions().requestOn(getRequest(), ds).has(Permission.EditDataset))) { if (dsv.getVersionNumber().equals(majorVersion) && dsv.getMinorVersionNumber().equals(minorVersion)) { return dsv; } diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java 
b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java index 2d52a6c6e15..ee81d3f67f4 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java @@ -3543,6 +3543,14 @@ public void getVersionFiles() throws IOException, InterruptedException { fileMetadatasCount = getVersionFilesResponseTabularTagName.jsonPath().getList("data").size(); assertEquals(1, fileMetadatasCount); + + // Test that the dataset files for a deaccessioned dataset cannot be accessed by a guest + // By latest published version + Response getDatasetVersionResponse = UtilIT.getVersionFiles(datasetId, DS_VERSION_LATEST_PUBLISHED, null, null, null, null, null, null, null, null, true, null); + getDatasetVersionResponse.then().assertThat().statusCode(NOT_FOUND.getStatusCode()); + // By specific version 1.0 + getDatasetVersionResponse = UtilIT.getVersionFiles(datasetId, "1.0", null, null, null, null, null, null, null, null, true, null); + getDatasetVersionResponse.then().assertThat().statusCode(NOT_FOUND.getStatusCode()); } @Test @@ -3620,6 +3628,14 @@ public void getVersionFileCounts() throws IOException { responseJsonPath = getVersionFileCountsResponseDeaccessioned.jsonPath(); assertEquals(4, (Integer) responseJsonPath.get("data.total")); + + // Test that the dataset file counts for a deaccessioned dataset cannot be accessed by a guest + // By latest published version + Response getDatasetVersionResponse = UtilIT.getVersionFileCounts(datasetId, DS_VERSION_LATEST_PUBLISHED, true, null); + getDatasetVersionResponse.then().assertThat().statusCode(NOT_FOUND.getStatusCode()); + // By specific version 1.0 + getDatasetVersionResponse = UtilIT.getVersionFileCounts(datasetId, "1.0", true, null); + getDatasetVersionResponse.then().assertThat().statusCode(NOT_FOUND.getStatusCode()); } @Test diff --git a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java index 
93a7cc64082..434dc6d26f1 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java @@ -3290,9 +3290,11 @@ static Response getVersionFiles(Integer datasetId, boolean includeDeaccessioned, String apiToken) { RequestSpecification requestSpecification = given() - .header(API_TOKEN_HTTP_HEADER, apiToken) .contentType("application/json") .queryParam("includeDeaccessioned", includeDeaccessioned); + if (apiToken != null) { + requestSpecification.header(API_TOKEN_HTTP_HEADER, apiToken); + } if (limit != null) { requestSpecification = requestSpecification.queryParam("limit", limit); } @@ -3372,10 +3374,12 @@ static Response createFileEmbargo(Integer datasetId, Integer fileId, String date } static Response getVersionFileCounts(Integer datasetId, String version, boolean includeDeaccessioned, String apiToken) { - return given() - .header(API_TOKEN_HTTP_HEADER, apiToken) - .queryParam("includeDeaccessioned", includeDeaccessioned) - .get("/api/datasets/" + datasetId + "/versions/" + version + "/files/counts"); + RequestSpecification requestSpecification = given() + .queryParam("includeDeaccessioned", includeDeaccessioned); + if (apiToken != null) { + requestSpecification.header(API_TOKEN_HTTP_HEADER, apiToken); + } + return requestSpecification.get("/api/datasets/" + datasetId + "/versions/" + version + "/files/counts"); } static Response setFileCategories(String dataFileId, String apiToken, List categories) { From 12ba35e9b9c4f0396ed942ea30a832e6a57c22c9 Mon Sep 17 00:00:00 2001 From: GPortas Date: Fri, 13 Oct 2023 17:14:17 +0100 Subject: [PATCH 393/396] Fixed: failing tests after develop merge --- src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java index 6626b18219c..34eccd3172a 100644 --- 
a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java @@ -3622,8 +3622,7 @@ public void getVersionFileCounts() throws IOException, InterruptedException { assertEquals(1, responseCountPerAccessStatusMap.get(FileSearchCriteria.FileAccessStatus.EmbargoedThenPublic.toString())); // Test content type criteria - getVersionFileCountsResponse = UtilIT.getVersionFileCounts(datasetId, DS_VERSION_LATEST_PUBLISHED, "image/png", null, null, null, null, false, apiToken); - + getVersionFileCountsResponse = UtilIT.getVersionFileCounts(datasetId, DS_VERSION_LATEST, "image/png", null, null, null, null, false, apiToken); getVersionFileCountsResponse.then().assertThat().statusCode(OK.getStatusCode()); responseJsonPath = getVersionFileCountsResponse.jsonPath(); @@ -3760,7 +3759,7 @@ public void getVersionFileCounts() throws IOException, InterruptedException { getVersionFileCountsResponseDeaccessioned.then().assertThat().statusCode(OK.getStatusCode()); responseJsonPath = getVersionFileCountsResponseDeaccessioned.jsonPath(); - assertEquals(4, (Integer) responseJsonPath.get("data.total")); + assertEquals(5, (Integer) responseJsonPath.get("data.total")); // Test that the dataset file counts for a deaccessioned dataset cannot be accessed by a guest // By latest published version From 35f69517ea2139c2e742b7d7b28e1b88dcdd9ef5 Mon Sep 17 00:00:00 2001 From: Leonid Andreev Date: Sun, 15 Oct 2023 20:22:09 -0400 Subject: [PATCH 394/396] Switching to the new version of gdcc/xoai, v5.2.0 (#9910) --- modules/dataverse-parent/pom.xml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/modules/dataverse-parent/pom.xml b/modules/dataverse-parent/pom.xml index 19f78415280..1d99c1cd3d8 100644 --- a/modules/dataverse-parent/pom.xml +++ b/modules/dataverse-parent/pom.xml @@ -165,7 +165,7 @@ 4.4.14 - 5.1.0 + 5.2.0 1.19.0 From 4182b036f24ba8402ffe7f2c304ed4026fa7874d Mon Sep 17 00:00:00 2001 From: Abhinav Rana 
<142827270+AR-2910@users.noreply.github.com> Date: Mon, 16 Oct 2023 07:50:09 +0530 Subject: [PATCH 395/396] Update config.rst Adding link to "Dataverse General User Interface Translation Guide for Weblate" in the "Tools For Translators" section. Issue #9512. --- doc/sphinx-guides/source/installation/config.rst | 2 ++ 1 file changed, 2 insertions(+) diff --git a/doc/sphinx-guides/source/installation/config.rst b/doc/sphinx-guides/source/installation/config.rst index 086b0a80895..ce8876b012c 100644 --- a/doc/sphinx-guides/source/installation/config.rst +++ b/doc/sphinx-guides/source/installation/config.rst @@ -1276,6 +1276,8 @@ The list below depicts a set of tools that can be used to ease the amount of wor - `easyTranslationHelper `_, a tool developed by `University of Aveiro `_. +- `Dataverse General User Interface Translation Guide for Weblate `_, a guide produced as part of the `SSHOC Dataverse Translation `_ event. + .. _Web-Analytics-Code: Web Analytics Code From e847ed04e87f16d5423bcfade38453fd1d959343 Mon Sep 17 00:00:00 2001 From: GPortas Date: Mon, 16 Oct 2023 16:53:58 +0100 Subject: [PATCH 396/396] Fixed: set label as second ordering column when ordering by content type --- .../harvard/iq/dataverse/DatasetVersionFilesServiceBean.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetVersionFilesServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DatasetVersionFilesServiceBean.java index 2c14498caa9..701ff4474ea 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetVersionFilesServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetVersionFilesServiceBean.java @@ -293,7 +293,7 @@ private void applyOrderCriteriaToGetFileMetadatasQuery(JPAQuery qu query.orderBy(fileMetadata.dataFile.filesize.asc()); break; case Type: - query.orderBy(fileMetadata.dataFile.contentType.asc()); + query.orderBy(fileMetadata.dataFile.contentType.asc(), fileMetadata.label.asc()); break; default: 
query.orderBy(fileMetadata.label.asc());