diff --git a/.env b/.env
index e3ececc2e54..d5cffcec0aa 100644
--- a/.env
+++ b/.env
@@ -1,4 +1,5 @@
APP_IMAGE=gdcc/dataverse:unstable
-POSTGRES_VERSION=13
+POSTGRES_VERSION=16
DATAVERSE_DB_USER=dataverse
SOLR_VERSION=9.3.0
+SKIP_DEPLOY=0
\ No newline at end of file
diff --git a/.github/workflows/maven_unit_test.yml b/.github/workflows/maven_unit_test.yml
index a70c55fc31d..4ad4798bc64 100644
--- a/.github/workflows/maven_unit_test.yml
+++ b/.github/workflows/maven_unit_test.yml
@@ -4,6 +4,7 @@ on:
push:
paths:
- "**.java"
+ - "**.sql"
- "pom.xml"
- "modules/**/pom.xml"
- "!modules/container-base/**"
@@ -11,6 +12,7 @@ on:
pull_request:
paths:
- "**.java"
+ - "**.sql"
- "pom.xml"
- "modules/**/pom.xml"
- "!modules/container-base/**"
diff --git a/.gitignore b/.gitignore
index 7f0d3a2b466..a9733538f7c 100644
--- a/.gitignore
+++ b/.gitignore
@@ -61,3 +61,4 @@ src/main/webapp/resources/images/dataverseproject.png.thumb140
# Docker development volumes
/docker-dev-volumes
+/.vs
diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
index b2be8f531c4..1430ba951a6 100644
--- a/CONTRIBUTING.md
+++ b/CONTRIBUTING.md
@@ -56,7 +56,7 @@ If you are interested in working on the main Dataverse code, great! Before you s
Please read http://guides.dataverse.org/en/latest/developers/version-control.html to understand how we use the "git flow" model of development and how we will encourage you to create a GitHub issue (if it doesn't exist already) to associate with your pull request. That page also includes tips on making a pull request.
-After making your pull request, your goal should be to help it advance through our kanban board at https://github.com/orgs/IQSS/projects/2 . If no one has moved your pull request to the code review column in a timely manner, please reach out. Note that once a pull request is created for an issue, we'll remove the issue from the board so that we only track one card (the pull request).
+After making your pull request, your goal should be to help it advance through our kanban board at https://github.com/orgs/IQSS/projects/34 . If no one has moved your pull request to the code review column in a timely manner, please reach out. Note that once a pull request is created for an issue, we'll remove the issue from the board so that we only track one card (the pull request).
Thanks for your contribution!
@@ -64,4 +64,4 @@ Thanks for your contribution!
[Community Call]: https://dataverse.org/community-calls
[dataverse-dev Google Group]: https://groups.google.com/group/dataverse-dev
[community contributors]: https://docs.google.com/spreadsheets/d/1o9DD-MQ0WkrYaEFTD5rF_NtyL8aUISgURsAXSL7Budk/edit?usp=sharing
-[dev efforts]: https://github.com/orgs/IQSS/projects/2#column-5298405
+[dev efforts]: https://github.com/orgs/IQSS/projects/34/views/6
diff --git a/README.md b/README.md
index 831dbfed5ff..651d0352dec 100644
--- a/README.md
+++ b/README.md
@@ -3,7 +3,7 @@ Dataverse®
Dataverse is an [open source][] software platform for sharing, finding, citing, and preserving research data (developed by the [Dataverse team](https://dataverse.org/about) at the [Institute for Quantitative Social Science](https://iq.harvard.edu/) and the [Dataverse community][]).
-[dataverse.org][] is our home on the web and shows a map of Dataverse installations around the world, a list of [features][], [integrations][] that have been made possible through [REST APIs][], our development [roadmap][], and more.
+[dataverse.org][] is our home on the web and shows a map of Dataverse installations around the world, a list of [features][], [integrations][] that have been made possible through [REST APIs][], our [project board][], our development [roadmap][], and more.
We maintain a demo site at [demo.dataverse.org][] which you are welcome to use for testing and evaluating Dataverse.
@@ -29,6 +29,7 @@ Dataverse is a trademark of President and Fellows of Harvard College and is regi
[Installation Guide]: https://guides.dataverse.org/en/latest/installation/index.html
[latest release]: https://github.com/IQSS/dataverse/releases
[features]: https://dataverse.org/software-features
+[project board]: https://github.com/orgs/IQSS/projects/34
[roadmap]: https://www.iq.harvard.edu/roadmap-dataverse-project
[integrations]: https://dataverse.org/integrations
[REST APIs]: https://guides.dataverse.org/en/latest/api/index.html
diff --git a/conf/proxy/Caddyfile b/conf/proxy/Caddyfile
new file mode 100644
index 00000000000..70e6904d26e
--- /dev/null
+++ b/conf/proxy/Caddyfile
@@ -0,0 +1,12 @@
+# This configuration is intended to be used with Caddy, a very small high-performance proxy.
+# It will serve the application container's Payara Admin GUI via HTTP instead of HTTPS,
+# avoiding the trouble of self-signed certificates for local development.
+
+:4848 {
+ reverse_proxy https://dataverse:4848 {
+ transport http {
+ tls_insecure_skip_verify
+ }
+ header_down Location "^https://" "http://"
+ }
+}
diff --git a/conf/solr/9.3.0/schema.xml b/conf/solr/9.3.0/schema.xml
index 3711ffeddba..90e9287d659 100644
--- a/conf/solr/9.3.0/schema.xml
+++ b/conf/solr/9.3.0/schema.xml
@@ -229,6 +229,8 @@
+
+
- 6.1
+ 6.2
17
UTF-8
@@ -149,18 +149,18 @@
6.2023.8
- 42.6.0
+ 42.7.2
9.3.0
1.12.290
- 0.177.0
+ 26.30.0
8.0.0
1.7.35
- 2.11.0
+ 2.15.1
1.2
3.12.0
- 1.21
+ 1.26.0
4.5.13
4.4.14
@@ -168,11 +168,11 @@
5.2.0
- 1.19.0
- 2.10.1
- 5.10.0
- 5.4.0
- 0.8.10
+ 1.19.7
+ 3.7.1
+ 5.10.2
+ 5.11.0
+ 0.8.11
9.3
@@ -182,8 +182,8 @@
3.3.2
3.5.0
3.1.1
- 3.1.0
- 3.1.0
+ 3.2.5
+ 3.2.5
3.6.0
3.3.1
3.0.0-M7
diff --git a/modules/nginx/Dockerfile b/modules/nginx/Dockerfile
deleted file mode 100644
index 3900076599f..00000000000
--- a/modules/nginx/Dockerfile
+++ /dev/null
@@ -1,9 +0,0 @@
-FROM nginx:latest
-
-# Remove the default NGINX configuration file
-RUN rm /etc/nginx/conf.d/default.conf
-
-# Copy the contents of the local default.conf to the container
-COPY default.conf /etc/nginx/conf.d/
-
-EXPOSE 4849
\ No newline at end of file
diff --git a/modules/nginx/README.md b/modules/nginx/README.md
deleted file mode 100644
index 9d2ff785577..00000000000
--- a/modules/nginx/README.md
+++ /dev/null
@@ -1,7 +0,0 @@
-# nginx proxy
-
-nginx can be used to proxy various services at other ports/protocols from docker.
-
-Currently, this is used to work around a problem with the IntelliJ Payara plugin, which doesn't allow remote redeployment in case the Payara admin is served via HTTPS using a self-signed certificate, which is the case of the default dataverse container installation. This configuration provides an HTTP endpoint at port 4849, and proxies requests to the Payara admin console's HTTPS 4848 endpoint. From the IntelliJ Payara plugin one has to specify the localhost 4849 port (without SSL).
-
-![img.png](img.png)
diff --git a/modules/nginx/default.conf b/modules/nginx/default.conf
deleted file mode 100644
index 8381a66c19a..00000000000
--- a/modules/nginx/default.conf
+++ /dev/null
@@ -1,12 +0,0 @@
-server {
- listen 4849;
-
- # Make it big, so that .war files can be submitted
- client_max_body_size 300M;
-
- location / {
- proxy_pass https://dataverse:4848;
- proxy_ssl_verify off;
- proxy_ssl_server_name on;
- }
-}
diff --git a/pom.xml b/pom.xml
index 34b0ad2e835..8f9d06b8744 100644
--- a/pom.xml
+++ b/pom.xml
@@ -27,7 +27,7 @@
war
1.2.18.4
- 9.21.2
+ 9.22.1
1.20.1
5.2.1
2.4.1
@@ -51,13 +51,24 @@
abdera-i18n
1.1.3
+
+ org.apache.abdera
+ abdera-parser
+ 1.1.3
+
+
+ org.apache.geronimo.specs
+ geronimo-javamail_1.4_spec
+
+
+
-
+
+
+ jakarta.json.bind
+ jakarta.json.bind-api
+
+
+
+ org.eclipse
+ yasson
+ test
+
+
org.glassfish
@@ -466,7 +494,7 @@
org.duracloud
common
- 7.1.1
+ 8.0.0
org.slf4j
@@ -481,7 +509,7 @@
org.duracloud
storeclient
- 7.1.1
+ 8.0.0
org.slf4j
@@ -542,6 +570,10 @@
dataverse-spi
2.0.0
+
+ javax.cache
+ cache-api
+
org.junit.jupiter
@@ -650,7 +682,12 @@
org.htmlunit
htmlunit
- 3.2.0
+ 3.9.0
+ test
+
+
+ com.hazelcast
+ hazelcast
test
@@ -682,6 +719,7 @@
**/firstNames/*.*
**/*.xsl
**/services/*
+ **/*.map
@@ -721,6 +759,7 @@
true
false
+ ${project.build.directory}/${project.artifactId}
true
@@ -911,10 +950,11 @@
true
docker-build
- 13
+ 16
gdcc/dataverse:${app.image.tag}
unstable
+ false
gdcc/base:${base.image.tag}
unstable
gdcc/configbaker:${conf.image.tag}
@@ -927,6 +967,7 @@
${postgresql.server.version}
${solr.version}
dataverse
+ ${app.skipDeploy}
@@ -1012,14 +1053,6 @@
true
-
-
- gdcc/dev_nginx:unstable
-
- ${project.basedir}/modules/nginx
-
-
-
true
diff --git a/scripts/api/data/dataset-create-new-all-default-fields.json b/scripts/api/data/dataset-create-new-all-default-fields.json
index 1118ed98a03..3bcf134bc76 100644
--- a/scripts/api/data/dataset-create-new-all-default-fields.json
+++ b/scripts/api/data/dataset-create-new-all-default-fields.json
@@ -907,14 +907,14 @@
"typeClass": "primitive",
"value": "-70"
},
- "northLongitude": {
- "typeName": "northLongitude",
+ "northLatitude": {
+ "typeName": "northLatitude",
"multiple": false,
"typeClass": "primitive",
"value": "43"
},
- "southLongitude": {
- "typeName": "southLongitude",
+ "southLatitude": {
+ "typeName": "southLatitude",
"multiple": false,
"typeClass": "primitive",
"value": "42"
@@ -933,14 +933,14 @@
"typeClass": "primitive",
"value": "-13"
},
- "northLongitude": {
- "typeName": "northLongitude",
+ "northLatitude": {
+ "typeName": "northLatitude",
"multiple": false,
"typeClass": "primitive",
"value": "29"
},
- "southLongitude": {
- "typeName": "southLongitude",
+ "southLatitude": {
+ "typeName": "southLatitude",
"multiple": false,
"typeClass": "primitive",
"value": "28"
diff --git a/scripts/api/data/metadatablocks/astrophysics.tsv b/scripts/api/data/metadatablocks/astrophysics.tsv
index 4039d32cb75..92792d404c9 100644
--- a/scripts/api/data/metadatablocks/astrophysics.tsv
+++ b/scripts/api/data/metadatablocks/astrophysics.tsv
@@ -2,13 +2,13 @@
astrophysics Astronomy and Astrophysics Metadata
#datasetField name title description watermark fieldType displayOrder displayFormat advancedSearchField allowControlledVocabulary allowmultiples facetable displayoncreate required parent metadatablock_id
astroType Type The nature or genre of the content of the files in the dataset. text 0 TRUE TRUE TRUE TRUE FALSE FALSE astrophysics
- astroFacility Facility The observatory or facility where the data was obtained. text 1 TRUE TRUE TRUE TRUE FALSE FALSE astrophysics
- astroInstrument Instrument The instrument used to collect the data. text 2 TRUE TRUE TRUE TRUE FALSE FALSE astrophysics
+ astroFacility Facility The observatory or facility where the data was obtained. text 1 TRUE FALSE TRUE TRUE FALSE FALSE astrophysics
+ astroInstrument Instrument The instrument used to collect the data. text 2 TRUE FALSE TRUE TRUE FALSE FALSE astrophysics
astroObject Object Astronomical Objects represented in the data (Given as SIMBAD recognizable names preferred). text 3 TRUE FALSE TRUE TRUE FALSE FALSE astrophysics
resolution.Spatial Spatial Resolution The spatial (angular) resolution that is typical of the observations, in decimal degrees. text 4 TRUE FALSE FALSE TRUE FALSE FALSE astrophysics
resolution.Spectral Spectral Resolution The spectral resolution that is typical of the observations, given as the ratio \u03bb/\u0394\u03bb. text 5 TRUE FALSE FALSE TRUE FALSE FALSE astrophysics
resolution.Temporal Time Resolution The temporal resolution that is typical of the observations, given in seconds. text 6 FALSE FALSE FALSE FALSE FALSE FALSE astrophysics
- coverage.Spectral.Bandpass Bandpass Conventional bandpass name text 7 TRUE TRUE TRUE TRUE FALSE FALSE astrophysics
+ coverage.Spectral.Bandpass Bandpass Conventional bandpass name text 7 TRUE FALSE TRUE TRUE FALSE FALSE astrophysics
coverage.Spectral.CentralWavelength Central Wavelength (m) The central wavelength of the spectral bandpass, in meters. Enter a floating-point number. float 8 TRUE FALSE TRUE TRUE FALSE FALSE astrophysics
coverage.Spectral.Wavelength Wavelength Range The minimum and maximum wavelength of the spectral bandpass. Enter a floating-point number. none 9 FALSE FALSE TRUE FALSE FALSE FALSE astrophysics
coverage.Spectral.MinimumWavelength Minimum (m) The minimum wavelength of the spectral bandpass, in meters. Enter a floating-point number. float 10 TRUE FALSE FALSE TRUE FALSE FALSE coverage.Spectral.Wavelength astrophysics
diff --git a/scripts/api/data/metadatablocks/biomedical.tsv b/scripts/api/data/metadatablocks/biomedical.tsv
index 28d59130c34..d70f754336a 100644
--- a/scripts/api/data/metadatablocks/biomedical.tsv
+++ b/scripts/api/data/metadatablocks/biomedical.tsv
@@ -13,7 +13,7 @@
studyAssayOtherTechnologyType Other Technology Type If Other was selected in Technology Type, list any other technology types that were used in this Dataset. text 9 TRUE FALSE TRUE TRUE FALSE FALSE biomedical
studyAssayPlatform Technology Platform The manufacturer and name of the technology platform used in the assay (e.g. Bruker AVANCE). text 10 TRUE TRUE TRUE TRUE FALSE FALSE biomedical
studyAssayOtherPlatform Other Technology Platform If Other was selected in Technology Platform, list any other technology platforms that were used in this Dataset. text 11 TRUE FALSE TRUE TRUE FALSE FALSE biomedical
- studyAssayCellType Cell Type The name of the cell line from which the source or sample derives. text 12 TRUE TRUE TRUE TRUE FALSE FALSE biomedical
+ studyAssayCellType Cell Type The name of the cell line from which the source or sample derives. text 12 TRUE FALSE TRUE TRUE FALSE FALSE biomedical
#controlledVocabulary DatasetField Value identifier displayOrder
studyDesignType Case Control EFO_0001427 0
studyDesignType Cross Sectional EFO_0001428 1
diff --git a/scripts/api/data/metadatablocks/citation.tsv b/scripts/api/data/metadatablocks/citation.tsv
index b21b6bcce57..c5af05927dc 100644
--- a/scripts/api/data/metadatablocks/citation.tsv
+++ b/scripts/api/data/metadatablocks/citation.tsv
@@ -70,7 +70,7 @@
seriesName Name The name of the dataset series text 66 #VALUE TRUE FALSE FALSE TRUE FALSE FALSE series citation
seriesInformation Information Can include 1) a history of the series and 2) a summary of features that apply to the series textbox 67 #VALUE FALSE FALSE FALSE FALSE FALSE FALSE series citation
software Software Information about the software used to generate the Dataset none 68 , FALSE FALSE TRUE FALSE FALSE FALSE citation https://www.w3.org/TR/prov-o/#wasGeneratedBy
- softwareName Name The name of software used to generate the Dataset text 69 #VALUE FALSE TRUE FALSE FALSE FALSE FALSE software citation
+ softwareName Name The name of software used to generate the Dataset text 69 #VALUE FALSE FALSE FALSE FALSE FALSE FALSE software citation
softwareVersion Version The version of the software used to generate the Dataset, e.g. 4.11 text 70 #NAME: #VALUE FALSE FALSE FALSE FALSE FALSE FALSE software citation
relatedMaterial Related Material Information, such as a persistent ID or citation, about the material related to the Dataset, such as appendices or sampling information available outside of the Dataset textbox 71 FALSE FALSE TRUE FALSE FALSE FALSE citation
relatedDatasets Related Dataset Information, such as a persistent ID or citation, about a related dataset, such as previous research on the Dataset's subject textbox 72 FALSE FALSE TRUE FALSE FALSE FALSE citation http://purl.org/dc/terms/relation
diff --git a/scripts/api/data/metadatablocks/geospatial.tsv b/scripts/api/data/metadatablocks/geospatial.tsv
index ce481c1bf84..11408317410 100644
--- a/scripts/api/data/metadatablocks/geospatial.tsv
+++ b/scripts/api/data/metadatablocks/geospatial.tsv
@@ -8,10 +8,10 @@
otherGeographicCoverage Other Other information on the geographic coverage of the data. text 4 #VALUE, FALSE FALSE FALSE TRUE FALSE FALSE geographicCoverage geospatial
geographicUnit Geographic Unit Lowest level of geographic aggregation covered by the Dataset, e.g., village, county, region. text 5 TRUE FALSE TRUE TRUE FALSE FALSE geospatial
geographicBoundingBox Geographic Bounding Box The fundamental geometric description for any Dataset that models geography is the geographic bounding box. It describes the minimum box, defined by west and east longitudes and north and south latitudes, which includes the largest geographic extent of the Dataset's geographic coverage. This element is used in the first pass of a coordinate-based search. Inclusion of this element in the codebook is recommended, but is required if the bound polygon box is included. none 6 FALSE FALSE TRUE FALSE FALSE FALSE geospatial
- westLongitude Westernmost (Left) Longitude Westernmost coordinate delimiting the geographic extent of the Dataset. A valid range of values, expressed in decimal degrees, is -180,0 <= West Bounding Longitude Value <= 180,0. text 7 FALSE FALSE FALSE FALSE FALSE FALSE geographicBoundingBox geospatial
- eastLongitude Easternmost (Right) Longitude Easternmost coordinate delimiting the geographic extent of the Dataset. A valid range of values, expressed in decimal degrees, is -180,0 <= East Bounding Longitude Value <= 180,0. text 8 FALSE FALSE FALSE FALSE FALSE FALSE geographicBoundingBox geospatial
- northLongitude Northernmost (Top) Latitude Northernmost coordinate delimiting the geographic extent of the Dataset. A valid range of values, expressed in decimal degrees, is -90,0 <= North Bounding Latitude Value <= 90,0. text 9 FALSE FALSE FALSE FALSE FALSE FALSE geographicBoundingBox geospatial
- southLongitude Southernmost (Bottom) Latitude Southernmost coordinate delimiting the geographic extent of the Dataset. A valid range of values, expressed in decimal degrees, is -90,0 <= South Bounding Latitude Value <= 90,0. text 10 FALSE FALSE FALSE FALSE FALSE FALSE geographicBoundingBox geospatial
+ westLongitude Westernmost (Left) Longitude Westernmost coordinate delimiting the geographic extent of the Dataset. A valid range of values, expressed in decimal degrees, is -180.0 <= West Bounding Longitude Value <= 180.0. text 7 FALSE FALSE FALSE FALSE FALSE FALSE geographicBoundingBox geospatial
+ eastLongitude Easternmost (Right) Longitude Easternmost coordinate delimiting the geographic extent of the Dataset. A valid range of values, expressed in decimal degrees, is -180.0 <= East Bounding Longitude Value <= 180.0. text 8 FALSE FALSE FALSE FALSE FALSE FALSE geographicBoundingBox geospatial
+ northLatitude Northernmost (Top) Latitude Northernmost coordinate delimiting the geographic extent of the Dataset. A valid range of values, expressed in decimal degrees, is -90.0 <= North Bounding Latitude Value <= 90.0. text 9 FALSE FALSE FALSE FALSE FALSE FALSE geographicBoundingBox geospatial
+ southLatitude Southernmost (Bottom) Latitude Southernmost coordinate delimiting the geographic extent of the Dataset. A valid range of values, expressed in decimal degrees, is -90.0 <= South Bounding Latitude Value <= 90.0. text 10 FALSE FALSE FALSE FALSE FALSE FALSE geographicBoundingBox geospatial
#controlledVocabulary DatasetField Value identifier displayOrder
country Afghanistan 0
country Albania 1
diff --git a/scripts/api/setup-all.sh b/scripts/api/setup-all.sh
index e247caa72b5..5ddd9a35fdc 100755
--- a/scripts/api/setup-all.sh
+++ b/scripts/api/setup-all.sh
@@ -57,10 +57,6 @@ echo "- Allow internal signup"
curl -X PUT -d yes "${DATAVERSE_URL}/api/admin/settings/:AllowSignUp"
curl -X PUT -d "/dataverseuser.xhtml?editMode=CREATE" "${DATAVERSE_URL}/api/admin/settings/:SignUpUrl"
-curl -X PUT -d doi "${DATAVERSE_URL}/api/admin/settings/:Protocol"
-curl -X PUT -d 10.5072 "${DATAVERSE_URL}/api/admin/settings/:Authority"
-curl -X PUT -d "FK2/" "${DATAVERSE_URL}/api/admin/settings/:Shoulder"
-curl -X PUT -d DataCite "${DATAVERSE_URL}/api/admin/settings/:DoiProvider"
curl -X PUT -d burrito "${DATAVERSE_URL}/api/admin/settings/BuiltinUsers.KEY"
curl -X PUT -d localhost-only "${DATAVERSE_URL}/api/admin/settings/:BlockedApiPolicy"
curl -X PUT -d 'native/http' "${DATAVERSE_URL}/api/admin/settings/:UploadMethods"
diff --git a/scripts/api/setup-optional-harvard.sh b/scripts/api/setup-optional-harvard.sh
index fcbcc08a8e6..1311464e8ff 100755
--- a/scripts/api/setup-optional-harvard.sh
+++ b/scripts/api/setup-optional-harvard.sh
@@ -3,6 +3,7 @@ SERVER=http://localhost:8080/api
echo "Setting up Harvard-specific settings"
# :Authority and :Shoulder are commented out so this script can be used on test servers
+# Should now use the new multi-PID JVM options instead of these settings
#curl -X PUT -d 10.7910 "$SERVER/admin/settings/:Authority"
#curl -X PUT -d "DVN/" "$SERVER/admin/settings/:Shoulder"
echo "- Application Status header"
diff --git a/scripts/deploy/phoenix.dataverse.org/post b/scripts/deploy/phoenix.dataverse.org/post
index e4c8817844b..9d37c183a1a 100755
--- a/scripts/deploy/phoenix.dataverse.org/post
+++ b/scripts/deploy/phoenix.dataverse.org/post
@@ -4,7 +4,6 @@ cd scripts/api
cd ../..
psql -U dvnapp dvndb -f scripts/database/reference_data.sql
psql -U dvnapp dvndb -f doc/sphinx-guides/source/_static/util/createsequence.sql
-curl http://localhost:8080/api/admin/settings/:DoiProvider -X PUT -d FAKE
scripts/search/tests/publish-dataverse-root
git checkout scripts/api/data/dv-root.json
scripts/search/tests/grant-authusers-add-on-root
diff --git a/scripts/dev/dev-rebuild.sh b/scripts/dev/dev-rebuild.sh
index 9eae195b135..898212b4664 100755
--- a/scripts/dev/dev-rebuild.sh
+++ b/scripts/dev/dev-rebuild.sh
@@ -56,9 +56,6 @@ cd ../..
echo "Creating SQL sequence..."
psql -h localhost -U $DB_USER $DB_NAME -f doc/sphinx-guides/source/_static/util/createsequence.sql
-echo "Setting DOI provider to \"FAKE\"..."
-curl http://localhost:8080/api/admin/settings/:DoiProvider -X PUT -d FAKE
-
echo "Allowing GUI edits to be visible without redeploy..."
$PAYARA_DIR/glassfish/bin/asadmin create-system-properties "dataverse.jsf.refresh-period=1"
diff --git a/scripts/dev/docker-final-setup.sh b/scripts/dev/docker-final-setup.sh
index d2453619ec2..e20ce7ad6b6 100755
--- a/scripts/dev/docker-final-setup.sh
+++ b/scripts/dev/docker-final-setup.sh
@@ -10,9 +10,6 @@ cd ../..
echo "Setting system mail address..."
curl -X PUT -d "dataverse@localhost" "http://localhost:8080/api/admin/settings/:SystemEmail"
-echo "Setting DOI provider to \"FAKE\"..."
-curl "http://localhost:8080/api/admin/settings/:DoiProvider" -X PUT -d FAKE
-
API_TOKEN=$(grep apiToken "/tmp/setup-all.sh.out" | jq ".data.apiToken" | tr -d \")
export API_TOKEN
diff --git a/scripts/installer/as-setup.sh b/scripts/installer/as-setup.sh
index fc5b378cff5..34deddf51a3 100755
--- a/scripts/installer/as-setup.sh
+++ b/scripts/installer/as-setup.sh
@@ -102,17 +102,18 @@ function preliminary_setup()
# password reset token timeout in minutes
./asadmin $ASADMIN_OPTS create-jvm-options "\-Ddataverse.auth.password-reset-timeout-in-minutes=60"
- # DataCite DOI Settings
+ # Fake DOI Settings
# (we can no longer offer EZID with their shared test account)
# jvm-options use colons as separators, escape as literal
DOI_BASEURL_ESC=`echo $DOI_BASEURL | sed -e 's/:/\\\:/'`
- ./asadmin $ASADMIN_OPTS create-jvm-options "\-Ddataverse.pid.datacite.username=${DOI_USERNAME}"
- ./asadmin $ASADMIN_OPTS create-jvm-options '\-Ddataverse.pid.datacite.password=${ALIAS=doi_password_alias}'
- ./asadmin $ASADMIN_OPTS create-jvm-options "\-Ddataverse.pid.datacite.mds-api-url=$DOI_BASEURL_ESC"
-
+ ./asadmin $ASADMIN_OPTS create-jvm-options "\-Ddataverse.pid.providers=fake"
+ ./asadmin $ASADMIN_OPTS create-jvm-options "\-Ddataverse.pid.fake.type=FAKE"
+ ./asadmin $ASADMIN_OPTS create-jvm-options "\-Ddataverse.pid.fake.label=Fake DOI Provider"
+ ./asadmin $ASADMIN_OPTS create-jvm-options "\-Ddataverse.pid.fake.authority=10.5072"
+ ./asadmin $ASADMIN_OPTS create-jvm-options "\-Ddataverse.pid.fake.shoulder=FK2/"
# jvm-options use colons as separators, escape as literal
- DOI_DATACITERESTAPIURL_ESC=`echo $DOI_DATACITERESTAPIURL | sed -e 's/:/\\\:/'`
- ./asadmin $ASADMIN_OPTS create-jvm-options "\-Ddataverse.pid.datacite.rest-api-url=$DOI_DATACITERESTAPIURL_ESC"
+ #DOI_DATACITERESTAPIURL_ESC=`echo $DOI_DATACITERESTAPIURL | sed -e 's/:/\\\:/'`
+ #./asadmin $ASADMIN_OPTS create-jvm-options "\-Ddataverse.pid.testDC.datacite.rest-api-url=$DOI_DATACITERESTAPIURL_ESC"
./asadmin $ASADMIN_OPTS create-jvm-options "-Ddataverse.timerServer=true"
@@ -146,12 +147,10 @@ function final_setup(){
# delete any existing mail/notifyMailSession; configure port, if provided:
./asadmin delete-javamail-resource mail/notifyMailSession
-
- if [ $SMTP_SERVER_PORT"x" != "x" ]
- then
- ./asadmin $ASADMIN_OPTS create-javamail-resource --mailhost "$SMTP_SERVER" --mailuser "dataversenotify" --fromaddress "do-not-reply@${HOST_ADDRESS}" --property mail.smtp.port="${SMTP_SERVER_PORT}" mail/notifyMailSession
- else
- ./asadmin $ASADMIN_OPTS create-javamail-resource --mailhost "$SMTP_SERVER" --mailuser "dataversenotify" --fromaddress "do-not-reply@${HOST_ADDRESS}" mail/notifyMailSession
+ ./asadmin $ASADMIN_OPTS create-system-properties "dataverse.mail.system-email='${ADMIN_EMAIL}'"
+ ./asadmin $ASADMIN_OPTS create-system-properties "dataverse.mail.mta.host='${SMTP_SERVER}'"
+ if [ "x${SMTP_SERVER_PORT}" != "x" ]; then
+ ./asadmin $ASADMIN_OPTS create-system-properties "dataverse.mail.mta.port='${SMTP_SERVER_PORT}'"
fi
}
@@ -279,6 +278,12 @@ if [ ! -d "$DOMAIN_DIR" ]
exit 2
fi
+if [ -z "$ADMIN_EMAIL" ]
+ then
+ echo "You must specify the system admin email address (ADMIN_EMAIL)."
+ exit 1
+fi
+
echo "Setting up your app. server (Payara) to support Dataverse"
echo "Payara directory: "$GLASSFISH_ROOT
echo "Domain directory: "$DOMAIN_DIR
diff --git a/scripts/installer/install.py b/scripts/installer/install.py
index 5a7b9f75696..005fbad46e0 100644
--- a/scripts/installer/install.py
+++ b/scripts/installer/install.py
@@ -422,9 +422,13 @@
conn.close()
if int(pg_major_version) >= 15:
+ admin_conn_string = "dbname='"+pgDb+"' user='postgres' password='"+pgAdminPassword+"' host='"+pgHost+"'"
+ conn = psycopg2.connect(admin_conn_string)
+ conn.set_isolation_level(ISOLATION_LEVEL_AUTOCOMMIT)
+ cur = conn.cursor()
conn_cmd = "GRANT CREATE ON SCHEMA public TO "+pgUser+";"
- print("PostgreSQL 15 or higher detected. Running " + conn_cmd)
try:
+ print("PostgreSQL 15 or higher detected. Running " + conn_cmd)
cur.execute(conn_cmd)
except:
if force:
@@ -564,14 +568,6 @@
except:
sys.exit("Failure to execute setup-all.sh! aborting.")
-# 7b. configure admin email in the application settings
-print("configuring system email address...")
-returnCode = subprocess.call(["curl", "-X", "PUT", "-d", adminEmail, apiUrl+"/admin/settings/:SystemEmail"])
-if returnCode != 0:
- print("\nWARNING: failed to configure the admin email in the Dataverse settings!")
-else:
- print("\ndone.")
-
# 8c. configure remote Solr location, if specified
if solrLocation != "LOCAL":
print("configuring remote Solr location... ("+solrLocation+")")
@@ -587,15 +583,14 @@
print("\n\nYou should now have a running Dataverse instance at")
print(" http://" + hostName + ":8080\n\n")
-# DataCite instructions:
+# PID instructions:
-print("\nYour Dataverse has been configured to use DataCite, to register DOI global identifiers in the ")
+print("\nYour Dataverse has been configured to use a Fake DOI Provider, registering (non-resolvable) DOI global identifiers in the ")
print("test name space \"10.5072\" with the \"shoulder\" \"FK2\"")
-print("However, you have to contact DataCite (support\@datacite.org) and request a test account, before you ")
-print("can publish datasets. Once you receive the account name and password, add them to your domain.xml,")
-print("as the following two JVM options:")
-print("\t-Ddataverse.pid.datacite.username=...")
-print("\t-Ddataverse.pid.datacite.password=...")
+print("You can reconfigure to use additional/alternative providers.")
+print("If you intend to use DOIs, you should contact DataCite (support\@datacite.org) or GDCC (see https://www.gdcc.io/about.html) and request a test account.")
+print("Once you receive the account information (name, password, authority, shoulder), add them to your configuration ")
+print("as described in the Dataverse Guides (see https://guides.dataverse.org/en/latest/installation/config.html#persistent-identifiers-and-publishing-datasets),")
print("and restart payara")
print("If this is a production Dataverse and you are planning to register datasets as ")
print("\"real\", non-test DOIs or Handles, consult the \"Persistent Identifiers and Publishing Datasets\"")
diff --git a/scripts/installer/installAppServer.py b/scripts/installer/installAppServer.py
index 698f5ba9a58..7636490c583 100644
--- a/scripts/installer/installAppServer.py
+++ b/scripts/installer/installAppServer.py
@@ -6,8 +6,9 @@ def runAsadminScript(config):
# commands to set up all the app. server (payara6) components for the application.
# All the parameters must be passed to that script as environmental
# variables:
- os.environ['GLASSFISH_DOMAIN'] = "domain1";
- os.environ['ASADMIN_OPTS'] = "";
+ os.environ['GLASSFISH_DOMAIN'] = "domain1"
+ os.environ['ASADMIN_OPTS'] = ""
+ os.environ['ADMIN_EMAIL'] = config.get('system','ADMIN_EMAIL')
os.environ['HOST_ADDRESS'] = config.get('glassfish','HOST_DNS_ADDRESS')
os.environ['GLASSFISH_ROOT'] = config.get('glassfish','GLASSFISH_DIRECTORY')
diff --git a/src/main/docker/assembly.xml b/src/main/docker/assembly.xml
index 9f9b39617a3..62cd910ef9b 100644
--- a/src/main/docker/assembly.xml
+++ b/src/main/docker/assembly.xml
@@ -3,7 +3,7 @@
- target/${project.artifactId}-${project.version}
+ target/${project.artifactId}
app
WEB-INF/lib/**/*
@@ -11,7 +11,7 @@
- target/${project.artifactId}-${project.version}/WEB-INF/lib
+ target/${project.artifactId}/WEB-INF/lib
deps
diff --git a/src/main/docker/scripts/init_2_configure.sh b/src/main/docker/scripts/init_2_configure.sh
index a98f08088c1..b31cfac37b7 100755
--- a/src/main/docker/scripts/init_2_configure.sh
+++ b/src/main/docker/scripts/init_2_configure.sh
@@ -31,10 +31,6 @@ echo "# Dataverse postboot configuration for Payara" > "${DV_POSTBOOT}"
# EE 8 code annotations or at least glassfish-resources.xml
# NOTE: postboot commands is not multi-line capable, thus spaghetti needed.
-# JavaMail
-echo "INFO: Defining JavaMail."
-echo "create-javamail-resource --mailhost=${DATAVERSE_MAIL_HOST:-smtp} --mailuser=${DATAVERSE_MAIL_USER:-dataversenotify} --fromaddress=${DATAVERSE_MAIL_FROM:-dataverse@localhost} mail/notifyMailSession" >> "${DV_POSTBOOT}"
-
# 3. Domain based configuration options
# Set Dataverse environment variables
echo "INFO: Defining system properties for Dataverse configuration options."
diff --git a/src/main/java/edu/harvard/iq/dataverse/AbstractGlobalIdServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/AbstractGlobalIdServiceBean.java
deleted file mode 100644
index f1bfc3e290b..00000000000
--- a/src/main/java/edu/harvard/iq/dataverse/AbstractGlobalIdServiceBean.java
+++ /dev/null
@@ -1,700 +0,0 @@
-package edu.harvard.iq.dataverse;
-
-import edu.harvard.iq.dataverse.settings.SettingsServiceBean;
-import edu.harvard.iq.dataverse.util.SystemConfig;
-import java.io.InputStream;
-import jakarta.ejb.EJB;
-import jakarta.inject.Inject;
-import java.util.*;
-import java.util.logging.Level;
-import java.util.logging.Logger;
-
-import org.apache.commons.lang3.RandomStringUtils;
-import org.jsoup.Jsoup;
-import org.jsoup.nodes.Document;
-import org.jsoup.nodes.Element;
-import org.jsoup.select.Elements;
-
-public abstract class AbstractGlobalIdServiceBean implements GlobalIdServiceBean {
-
- private static final Logger logger = Logger.getLogger(AbstractGlobalIdServiceBean.class.getCanonicalName());
-
- @Inject
- DataverseServiceBean dataverseService;
- @EJB
- protected
- SettingsServiceBean settingsService;
- @Inject
- protected
- DvObjectServiceBean dvObjectService;
- @Inject
- SystemConfig systemConfig;
-
- protected Boolean configured = null;
-
- public static String UNAVAILABLE = ":unav";
-
- @Override
- public Map getMetadataForCreateIndicator(DvObject dvObjectIn) {
- logger.log(Level.FINE,"getMetadataForCreateIndicator(DvObject)");
- Map metadata = new HashMap<>();
- metadata = addBasicMetadata(dvObjectIn, metadata);
- metadata.put("datacite.publicationyear", generateYear(dvObjectIn));
- metadata.put("_target", getTargetUrl(dvObjectIn));
- return metadata;
- }
-
- protected Map getUpdateMetadata(DvObject dvObjectIn) {
- logger.log(Level.FINE,"getUpdateMetadataFromDataset");
- Map metadata = new HashMap<>();
- metadata = addBasicMetadata(dvObjectIn, metadata);
- return metadata;
- }
-
- protected Map addBasicMetadata(DvObject dvObjectIn, Map metadata) {
-
- String authorString = dvObjectIn.getAuthorString();
- if (authorString.isEmpty() || authorString.contains(DatasetField.NA_VALUE)) {
- authorString = UNAVAILABLE;
- }
-
- String producerString = dataverseService.getRootDataverseName();
-
- if (producerString.isEmpty() || producerString.equals(DatasetField.NA_VALUE)) {
- producerString = UNAVAILABLE;
- }
-
- String titleString = dvObjectIn.getCurrentName();
-
- if (titleString.isEmpty() || titleString.equals(DatasetField.NA_VALUE)) {
- titleString = UNAVAILABLE;
- }
-
- metadata.put("datacite.creator", authorString);
- metadata.put("datacite.title", titleString);
- metadata.put("datacite.publisher", producerString);
- metadata.put("datacite.publicationyear", generateYear(dvObjectIn));
- return metadata;
- }
-
- protected Map addDOIMetadataForDestroyedDataset(DvObject dvObjectIn) {
- Map metadata = new HashMap<>();
- String authorString = UNAVAILABLE;
- String producerString = UNAVAILABLE;
- String titleString = "This item has been removed from publication";
-
- metadata.put("datacite.creator", authorString);
- metadata.put("datacite.title", titleString);
- metadata.put("datacite.publisher", producerString);
- metadata.put("datacite.publicationyear", "9999");
- return metadata;
- }
-
- protected String getTargetUrl(DvObject dvObjectIn) {
- logger.log(Level.FINE,"getTargetUrl");
- return systemConfig.getDataverseSiteUrl() + dvObjectIn.getTargetUrl() + dvObjectIn.getGlobalId().asString();
- }
-
- @Override
- public String getIdentifier(DvObject dvObject) {
- GlobalId gid = dvObject.getGlobalId();
- return gid != null ? gid.asString() : null;
- }
-
- protected String generateYear (DvObject dvObjectIn){
- return dvObjectIn.getYearPublishedCreated();
- }
-
- public Map getMetadataForTargetURL(DvObject dvObject) {
- logger.log(Level.FINE,"getMetadataForTargetURL");
- HashMap metadata = new HashMap<>();
- metadata.put("_target", getTargetUrl(dvObject));
- return metadata;
- }
-
- @Override
- public boolean alreadyRegistered(DvObject dvo) throws Exception {
- if(dvo==null) {
- logger.severe("Null DvObject sent to alreadyRegistered().");
- return false;
- }
- GlobalId globalId = dvo.getGlobalId();
- if(globalId == null) {
- return false;
- }
- return alreadyRegistered(globalId, false);
- }
-
- public abstract boolean alreadyRegistered(GlobalId globalId, boolean noProviderDefault) throws Exception;
-
- /*
- * ToDo: the DvObject being sent in provides partial support for the case where
- * it has a different authority/protocol than what is configured (i.e. a legacy
- * Pid that can actually be updated by the Pid account being used.) Removing
- * this now would potentially break/make it harder to handle that case prior to
- * support for configuring multiple Pid providers. Once that exists, it would be
- * cleaner to always find the PidProvider associated with the
- * protocol/authority/shoulder of the current dataset and then not pass the
- * DvObject as a param. (This would also remove calls to get the settings since
- * that would be done at construction.)
- */
- @Override
- public DvObject generateIdentifier(DvObject dvObject) {
-
- String protocol = dvObject.getProtocol() == null ? settingsService.getValueForKey(SettingsServiceBean.Key.Protocol) : dvObject.getProtocol();
- String authority = dvObject.getAuthority() == null ? settingsService.getValueForKey(SettingsServiceBean.Key.Authority) : dvObject.getAuthority();
- if (dvObject.isInstanceofDataset()) {
- dvObject.setIdentifier(generateDatasetIdentifier((Dataset) dvObject));
- } else {
- dvObject.setIdentifier(generateDataFileIdentifier((DataFile) dvObject));
- }
- if (dvObject.getProtocol() == null) {
- dvObject.setProtocol(protocol);
- }
- if (dvObject.getAuthority() == null) {
- dvObject.setAuthority(authority);
- }
- return dvObject;
- }
-
- //ToDo just send the DvObject.DType
- public String generateDatasetIdentifier(Dataset dataset) {
- //ToDo - track these in the bean
- String identifierType = settingsService.getValueForKey(SettingsServiceBean.Key.IdentifierGenerationStyle, "randomString");
- String shoulder = settingsService.getValueForKey(SettingsServiceBean.Key.Shoulder, "");
-
- switch (identifierType) {
- case "randomString":
- return generateIdentifierAsRandomString(dataset, shoulder);
- case "storedProcGenerated":
- return generateIdentifierFromStoredProcedureIndependent(dataset, shoulder);
- default:
- /* Should we throw an exception instead?? -- L.A. 4.6.2 */
- return generateIdentifierAsRandomString(dataset, shoulder);
- }
- }
-
-
- /**
- * Check that a identifier entered by the user is unique (not currently used
- * for any other study in this Dataverse Network) also check for duplicate
- * in EZID if needed
- * @param userIdentifier
- * @param dataset
- * @return {@code true} if the identifier is unique, {@code false} otherwise.
- */
- public boolean isGlobalIdUnique(GlobalId globalId) {
- if ( ! dvObjectService.isGlobalIdLocallyUnique(globalId) ) {
- return false; // duplication found in local database
- }
-
- // not in local DB, look in the persistent identifier service
- try {
- return ! alreadyRegistered(globalId, false);
- } catch (Exception e){
- //we can live with failure - means identifier not found remotely
- }
-
- return true;
- }
-
- /**
- * Parse a Persistent Id and set the protocol, authority, and identifier
- *
- * Example 1: doi:10.5072/FK2/BYM3IW
- * protocol: doi
- * authority: 10.5072
- * identifier: FK2/BYM3IW
- *
- * Example 2: hdl:1902.1/111012
- * protocol: hdl
- * authority: 1902.1
- * identifier: 111012
- *
- * @param identifierString
- * @param separator the string that separates the authority from the identifier.
- * @param destination the global id that will contain the parsed data.
- * @return {@code destination}, after its fields have been updated, or
- * {@code null} if parsing failed.
- */
- @Override
- public GlobalId parsePersistentId(String fullIdentifierString) {
- if(!isConfigured()) {
- return null;
- }
- // Occasionally, the protocol separator character ':' comes in still
- // URL-encoded as %3A (usually as a result of the URL having been
- // encoded twice):
- fullIdentifierString = fullIdentifierString.replace("%3A", ":");
-
- int index1 = fullIdentifierString.indexOf(':');
- if (index1 > 0) { // ':' found with one or more characters before it
- String protocol = fullIdentifierString.substring(0, index1);
- GlobalId globalId = parsePersistentId(protocol, fullIdentifierString.substring(index1+1));
- return globalId;
- }
- logger.log(Level.INFO, "Error parsing identifier: {0}: '':'' not found in string", fullIdentifierString);
- return null;
- }
-
- protected GlobalId parsePersistentId(String protocol, String identifierString) {
- if(!isConfigured()) {
- return null;
- }
- String authority;
- String identifier;
- if (identifierString == null) {
- return null;
- }
- int index = identifierString.indexOf('/');
- if (index > 0 && (index + 1) < identifierString.length()) {
- // '/' found with one or more characters
- // before and after it
- // Strip any whitespace, ; and ' from authority (should finding them cause a
- // failure instead?)
- authority = GlobalIdServiceBean.formatIdentifierString(identifierString.substring(0, index));
- if (GlobalIdServiceBean.testforNullTerminator(authority)) {
- return null;
- }
- identifier = GlobalIdServiceBean.formatIdentifierString(identifierString.substring(index + 1));
- if (GlobalIdServiceBean.testforNullTerminator(identifier)) {
- return null;
- }
- } else {
- logger.log(Level.INFO, "Error parsing identifier: {0}: '':/'' not found in string",
- identifierString);
- return null;
- }
- return parsePersistentId(protocol, authority, identifier);
- }
-
- public GlobalId parsePersistentId(String protocol, String authority, String identifier) {
- if(!isConfigured()) {
- return null;
- }
- logger.fine("Parsing: " + protocol + ":" + authority + getSeparator() + identifier + " in " + getProviderInformation().get(0));
- if(!GlobalIdServiceBean.isValidGlobalId(protocol, authority, identifier)) {
- return null;
- }
- return new GlobalId(protocol, authority, identifier, getSeparator(), getUrlPrefix(),
- getProviderInformation().get(0));
- }
-
-
- public String getSeparator() {
- //The standard default
- return "/";
- }
-
- @Override
- public String generateDataFileIdentifier(DataFile datafile) {
- String doiIdentifierType = settingsService.getValueForKey(SettingsServiceBean.Key.IdentifierGenerationStyle, "randomString");
- String doiDataFileFormat = settingsService.getValueForKey(SettingsServiceBean.Key.DataFilePIDFormat, SystemConfig.DataFilePIDFormat.DEPENDENT.toString());
-
- String prepend = "";
- if (doiDataFileFormat.equals(SystemConfig.DataFilePIDFormat.DEPENDENT.toString())){
- //If format is dependent then pre-pend the dataset identifier
- prepend = datafile.getOwner().getIdentifier() + "/";
- datafile.setProtocol(datafile.getOwner().getProtocol());
- datafile.setAuthority(datafile.getOwner().getAuthority());
- } else {
- //If there's a shoulder prepend independent identifiers with it
- prepend = settingsService.getValueForKey(SettingsServiceBean.Key.Shoulder, "");
- datafile.setProtocol(settingsService.getValueForKey(SettingsServiceBean.Key.Protocol));
- datafile.setAuthority(settingsService.getValueForKey(SettingsServiceBean.Key.Authority));
- }
-
- switch (doiIdentifierType) {
- case "randomString":
- return generateIdentifierAsRandomString(datafile, prepend);
- case "storedProcGenerated":
- if (doiDataFileFormat.equals(SystemConfig.DataFilePIDFormat.INDEPENDENT.toString())){
- return generateIdentifierFromStoredProcedureIndependent(datafile, prepend);
- } else {
- return generateIdentifierFromStoredProcedureDependent(datafile, prepend);
- }
- default:
- /* Should we throw an exception instead?? -- L.A. 4.6.2 */
- return generateIdentifierAsRandomString(datafile, prepend);
- }
- }
-
-
- /*
- * This method checks locally for a DvObject with the same PID and if that is OK, checks with the PID service.
- * @param dvo - the object to check (ToDo - get protocol/authority from this PidProvider object)
- * @param prepend - for Datasets, this is always the shoulder, for DataFiles, it could be the shoulder or the parent Dataset identifier
- */
- private String generateIdentifierAsRandomString(DvObject dvo, String prepend) {
- String identifier = null;
- do {
- identifier = prepend + RandomStringUtils.randomAlphanumeric(6).toUpperCase();
- } while (!isGlobalIdUnique(new GlobalId(dvo.getProtocol(), dvo.getAuthority(), identifier, this.getSeparator(), this.getUrlPrefix(), this.getProviderInformation().get(0))));
-
- return identifier;
- }
-
- /*
- * This method checks locally for a DvObject with the same PID and if that is OK, checks with the PID service.
- * @param dvo - the object to check (ToDo - get protocol/authority from this PidProvider object)
- * @param prepend - for Datasets, this is always the shoulder, for DataFiles, it could be the shoulder or the parent Dataset identifier
- */
-
- private String generateIdentifierFromStoredProcedureIndependent(DvObject dvo, String prepend) {
- String identifier;
- do {
- String identifierFromStoredProcedure = dvObjectService.generateNewIdentifierByStoredProcedure();
- // some diagnostics here maybe - is it possible to determine that it's failing
- // because the stored procedure hasn't been created in the database?
- if (identifierFromStoredProcedure == null) {
- return null;
- }
- identifier = prepend + identifierFromStoredProcedure;
- } while (!isGlobalIdUnique(new GlobalId(dvo.getProtocol(), dvo.getAuthority(), identifier, this.getSeparator(), this.getUrlPrefix(), this.getProviderInformation().get(0))));
-
- return identifier;
- }
-
- /*This method is only used for DataFiles with DEPENDENT Pids. It is not for Datasets
- *
- */
- private String generateIdentifierFromStoredProcedureDependent(DataFile datafile, String prepend) {
- String identifier;
- Long retVal;
- retVal = Long.valueOf(0L);
- //ToDo - replace loops with one lookup for largest entry? (the do loop runs ~n**2/2 calls). The check for existingIdentifiers means this is mostly a local loop now, versus involving db or PidProvider calls, but still...)
-
- // This will catch identifiers already assigned in the current transaction (e.g.
- // in FinalizeDatasetPublicationCommand) that haven't been committed to the db
- // without having to make a call to the PIDProvider
- Set existingIdentifiers = new HashSet();
- List files = datafile.getOwner().getFiles();
- for(DataFile f:files) {
- existingIdentifiers.add(f.getIdentifier());
- }
-
- do {
- retVal++;
- identifier = prepend + retVal.toString();
-
- } while (existingIdentifiers.contains(identifier) || !isGlobalIdUnique(new GlobalId(datafile.getProtocol(), datafile.getAuthority(), identifier, this.getSeparator(), this.getUrlPrefix(), this.getProviderInformation().get(0))));
-
- return identifier;
- }
-
-
- class GlobalIdMetadataTemplate {
-
-
- private String template;
-
- public GlobalIdMetadataTemplate(){
- try (InputStream in = GlobalIdMetadataTemplate.class.getResourceAsStream("datacite_metadata_template.xml")) {
- template = Util.readAndClose(in, "utf-8");
- } catch (Exception e) {
- logger.log(Level.SEVERE, "datacite metadata template load error");
- logger.log(Level.SEVERE, "String " + e.toString());
- logger.log(Level.SEVERE, "localized message " + e.getLocalizedMessage());
- logger.log(Level.SEVERE, "cause " + e.getCause());
- logger.log(Level.SEVERE, "message " + e.getMessage());
- }
- }
-
- private String xmlMetadata;
- private String identifier;
- private List datafileIdentifiers;
- private List creators;
- private String title;
- private String publisher;
- private String publisherYear;
- private List authors;
- private String description;
- private List contacts;
- private List producers;
-
- public List getProducers() {
- return producers;
- }
-
- public void setProducers(List producers) {
- this.producers = producers;
- }
-
- public List getContacts() {
- return contacts;
- }
-
- public void setContacts(List contacts) {
- this.contacts = contacts;
- }
-
- public String getDescription() {
- return description;
- }
-
- public void setDescription(String description) {
- this.description = description;
- }
-
- public List getAuthors() {
- return authors;
- }
-
- public void setAuthors(List authors) {
- this.authors = authors;
- }
-
-
- public List getDatafileIdentifiers() {
- return datafileIdentifiers;
- }
-
- public void setDatafileIdentifiers(List datafileIdentifiers) {
- this.datafileIdentifiers = datafileIdentifiers;
- }
-
- public GlobalIdMetadataTemplate(String xmlMetaData) {
- this.xmlMetadata = xmlMetaData;
- Document doc = Jsoup.parseBodyFragment(xmlMetaData);
- Elements identifierElements = doc.select("identifier");
- if (identifierElements.size() > 0) {
- identifier = identifierElements.get(0).html();
- }
- Elements creatorElements = doc.select("creatorName");
- creators = new ArrayList<>();
- for (Element creatorElement : creatorElements) {
- creators.add(creatorElement.html());
- }
- Elements titleElements = doc.select("title");
- if (titleElements.size() > 0) {
- title = titleElements.get(0).html();
- }
- Elements publisherElements = doc.select("publisher");
- if (publisherElements.size() > 0) {
- publisher = publisherElements.get(0).html();
- }
- Elements publisherYearElements = doc.select("publicationYear");
- if (publisherYearElements.size() > 0) {
- publisherYear = publisherYearElements.get(0).html();
- }
- }
-
- public String generateXML(DvObject dvObject) {
- // Can't use "UNKNOWN" here because DataCite will respond with "[facet 'pattern'] the value 'unknown' is not accepted by the pattern '[\d]{4}'"
- String publisherYearFinal = "9999";
- // FIXME: Investigate why this.publisherYear is sometimes null now that pull request #4606 has been merged.
- if (this.publisherYear != null) {
- // Added to prevent a NullPointerException when trying to destroy datasets when using DataCite rather than EZID.
- publisherYearFinal = this.publisherYear;
- }
- xmlMetadata = template.replace("${identifier}", getIdentifier().trim())
- .replace("${title}", this.title)
- .replace("${publisher}", this.publisher)
- .replace("${publisherYear}", publisherYearFinal)
- .replace("${description}", this.description);
- StringBuilder creatorsElement = new StringBuilder();
- for (DatasetAuthor author : authors) {
- creatorsElement.append("");
- creatorsElement.append(author.getName().getDisplayValue());
- creatorsElement.append("");
-
- if (author.getIdType() != null && author.getIdValue() != null && !author.getIdType().isEmpty() && !author.getIdValue().isEmpty() && author.getAffiliation() != null && !author.getAffiliation().getDisplayValue().isEmpty()) {
-
- if (author.getIdType().equals("ORCID")) {
- creatorsElement.append("" + author.getIdValue() + "");
- }
- if (author.getIdType().equals("ISNI")) {
- creatorsElement.append("" + author.getIdValue() + "");
- }
- if (author.getIdType().equals("LCNA")) {
- creatorsElement.append("" + author.getIdValue() + "");
- }
- }
- if (author.getAffiliation() != null && !author.getAffiliation().getDisplayValue().isEmpty()) {
- creatorsElement.append("" + author.getAffiliation().getDisplayValue() + "");
- }
- creatorsElement.append("");
- }
- xmlMetadata = xmlMetadata.replace("${creators}", creatorsElement.toString());
-
- StringBuilder contributorsElement = new StringBuilder();
- for (String[] contact : this.getContacts()) {
- if (!contact[0].isEmpty()) {
- contributorsElement.append("" + contact[0] + "");
- if (!contact[1].isEmpty()) {
- contributorsElement.append("" + contact[1] + "");
- }
- contributorsElement.append("");
- }
- }
- for (String[] producer : this.getProducers()) {
- contributorsElement.append("" + producer[0] + "");
- if (!producer[1].isEmpty()) {
- contributorsElement.append("" + producer[1] + "");
- }
- contributorsElement.append("");
- }
-
- String relIdentifiers = generateRelatedIdentifiers(dvObject);
-
- xmlMetadata = xmlMetadata.replace("${relatedIdentifiers}", relIdentifiers);
-
- xmlMetadata = xmlMetadata.replace("{$contributors}", contributorsElement.toString());
- return xmlMetadata;
- }
-
- private String generateRelatedIdentifiers(DvObject dvObject) {
-
- StringBuilder sb = new StringBuilder();
- if (dvObject.isInstanceofDataset()) {
- Dataset dataset = (Dataset) dvObject;
- if (!dataset.getFiles().isEmpty() && !(dataset.getFiles().get(0).getIdentifier() == null)) {
-
- datafileIdentifiers = new ArrayList<>();
- for (DataFile dataFile : dataset.getFiles()) {
- if (!dataFile.getGlobalId().asString().isEmpty()) {
- if (sb.toString().isEmpty()) {
- sb.append("");
- }
- sb.append("" + dataFile.getGlobalId() + "");
- }
- }
-
- if (!sb.toString().isEmpty()) {
- sb.append("");
- }
- }
- } else if (dvObject.isInstanceofDataFile()) {
- DataFile df = (DataFile) dvObject;
- sb.append("");
- sb.append("" + df.getOwner().getGlobalId() + "");
- sb.append("");
- }
- return sb.toString();
- }
-
- public void generateFileIdentifiers(DvObject dvObject) {
-
- if (dvObject.isInstanceofDataset()) {
- Dataset dataset = (Dataset) dvObject;
-
- if (!dataset.getFiles().isEmpty() && !(dataset.getFiles().get(0).getIdentifier() == null)) {
-
- datafileIdentifiers = new ArrayList<>();
- for (DataFile dataFile : dataset.getFiles()) {
- datafileIdentifiers.add(dataFile.getIdentifier());
- int x = xmlMetadata.indexOf("") - 1;
- xmlMetadata = xmlMetadata.replace("{relatedIdentifier}", dataFile.getIdentifier());
- xmlMetadata = xmlMetadata.substring(0, x) + "${relatedIdentifier}" + template.substring(x, template.length() - 1);
-
- }
-
- } else {
- xmlMetadata = xmlMetadata.replace("${relatedIdentifier}", "");
- }
- }
- }
-
- public String getTemplate() {
- return template;
- }
-
- public void setTemplate(String templateIn) {
- template = templateIn;
- }
-
- public String getIdentifier() {
- return identifier;
- }
-
- public void setIdentifier(String identifier) {
- this.identifier = identifier;
- }
-
- public List getCreators() {
- return creators;
- }
-
- public void setCreators(List creators) {
- this.creators = creators;
- }
-
- public String getTitle() {
- return title;
- }
-
- public void setTitle(String title) {
- this.title = title;
- }
-
- public String getPublisher() {
- return publisher;
- }
-
- public void setPublisher(String publisher) {
- this.publisher = publisher;
- }
-
- public String getPublisherYear() {
- return publisherYear;
- }
-
- public void setPublisherYear(String publisherYear) {
- this.publisherYear = publisherYear;
- }
-}
- public String getMetadataFromDvObject(String identifier, Map metadata, DvObject dvObject) {
-
- Dataset dataset = null;
-
- if (dvObject instanceof Dataset) {
- dataset = (Dataset) dvObject;
- } else {
- dataset = (Dataset) dvObject.getOwner();
- }
-
- GlobalIdMetadataTemplate metadataTemplate = new GlobalIdMetadataTemplate();
- metadataTemplate.setIdentifier(identifier.substring(identifier.indexOf(':') + 1));
- metadataTemplate.setCreators(Util.getListFromStr(metadata.get("datacite.creator")));
- metadataTemplate.setAuthors(dataset.getLatestVersion().getDatasetAuthors());
- if (dvObject.isInstanceofDataset()) {
- metadataTemplate.setDescription(dataset.getLatestVersion().getDescriptionPlainText());
- }
- if (dvObject.isInstanceofDataFile()) {
- DataFile df = (DataFile) dvObject;
- String fileDescription = df.getDescription();
- metadataTemplate.setDescription(fileDescription == null ? "" : fileDescription);
- }
-
- metadataTemplate.setContacts(dataset.getLatestVersion().getDatasetContacts());
- metadataTemplate.setProducers(dataset.getLatestVersion().getDatasetProducers());
- metadataTemplate.setTitle(dvObject.getCurrentName());
- String producerString = dataverseService.getRootDataverseName();
- if (producerString.isEmpty() || producerString.equals(DatasetField.NA_VALUE) ) {
- producerString = UNAVAILABLE;
- }
- metadataTemplate.setPublisher(producerString);
- metadataTemplate.setPublisherYear(metadata.get("datacite.publicationyear"));
-
- String xmlMetadata = metadataTemplate.generateXML(dvObject);
- logger.log(Level.FINE, "XML to send to DataCite: {0}", xmlMetadata);
- return xmlMetadata;
- }
-
- @Override
- public boolean canManagePID() {
- //The default expectation is that PID providers are configured to manage some set (i.e. based on protocol/authority/shoulder) of PIDs
- return true;
- }
-
- @Override
- public boolean isConfigured() {
- if(configured==null) {
- return false;
- } else {
- return configured.booleanValue();
- }
- }
-}
diff --git a/src/main/java/edu/harvard/iq/dataverse/DOIDataCiteRegisterCache.java b/src/main/java/edu/harvard/iq/dataverse/DOIDataCiteRegisterCache.java
deleted file mode 100644
index 7c75b1a4da6..00000000000
--- a/src/main/java/edu/harvard/iq/dataverse/DOIDataCiteRegisterCache.java
+++ /dev/null
@@ -1,90 +0,0 @@
-/*
- * To change this license header, choose License Headers in Project Properties.
- * To change this template file, choose Tools | Templates
- * and open the template in the editor.
- */
-package edu.harvard.iq.dataverse;
-
-
-import java.io.Serializable;
-import jakarta.persistence.Column;
-import jakarta.persistence.Entity;
-import jakarta.persistence.GeneratedValue;
-import jakarta.persistence.GenerationType;
-import jakarta.persistence.Id;
-import jakarta.persistence.Lob;
-import jakarta.persistence.NamedQueries;
-import jakarta.persistence.NamedQuery;
-import org.hibernate.validator.constraints.NotBlank;
-
-/**
- *
- * @author luopc
- */
-@NamedQueries(
- @NamedQuery( name="DOIDataCiteRegisterCache.findByDoi",
- query="SELECT d FROM DOIDataCiteRegisterCache d WHERE d.doi=:doi")
-)
-@Entity
-public class DOIDataCiteRegisterCache implements Serializable{
-
- private static final long serialVersionUID = 8030143094734315681L;
-
- @Id
- @GeneratedValue(strategy = GenerationType.IDENTITY)
- private Long id;
-
- @NotBlank
- @Column(unique=true)
- private String doi;
-
- @NotBlank
- private String url;
-
- @NotBlank
- private String status;
-
- @NotBlank
- @Lob
- private String xml;
-
- public Long getId() {
- return id;
- }
-
- public void setId(Long id) {
- this.id = id;
- }
-
- public String getDoi() {
- return doi;
- }
-
- public void setDoi(String doi) {
- this.doi = doi;
- }
-
- public String getStatus() {
- return status;
- }
-
- public void setStatus(String status) {
- this.status = status;
- }
-
- public String getXml() {
- return xml;
- }
-
- public void setXml(String xml) {
- this.xml = xml;
- }
-
- public String getUrl() {
- return url;
- }
-
- public void setUrl(String url) {
- this.url = url;
- }
-}
\ No newline at end of file
diff --git a/src/main/java/edu/harvard/iq/dataverse/DOIDataCiteRegisterService.java b/src/main/java/edu/harvard/iq/dataverse/DOIDataCiteRegisterService.java
deleted file mode 100644
index 9ecc4a3ecc9..00000000000
--- a/src/main/java/edu/harvard/iq/dataverse/DOIDataCiteRegisterService.java
+++ /dev/null
@@ -1,707 +0,0 @@
-/*
- * To change this license header, choose License Headers in Project Properties.
- * To change this template file, choose Tools | Templates
- * and open the template in the editor.
- */
-package edu.harvard.iq.dataverse;
-
-import edu.harvard.iq.dataverse.branding.BrandingUtil;
-
-import java.io.ByteArrayOutputStream;
-import java.io.IOException;
-import java.io.InputStream;
-import java.io.UnsupportedEncodingException;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-import java.util.logging.Level;
-import java.util.logging.Logger;
-import jakarta.ejb.EJB;
-import jakarta.ejb.Stateless;
-import jakarta.persistence.EntityManager;
-import jakarta.persistence.PersistenceContext;
-import jakarta.persistence.TypedQuery;
-
-import edu.harvard.iq.dataverse.settings.JvmSettings;
-import org.apache.commons.text.StringEscapeUtils;
-import org.jsoup.Jsoup;
-import org.jsoup.nodes.Document;
-import org.jsoup.nodes.Element;
-import org.jsoup.select.Elements;
-
-/**
- *
- * @author luopc
- */
-@Stateless
-public class DOIDataCiteRegisterService {
-
- private static final Logger logger = Logger.getLogger(DOIDataCiteRegisterService.class.getCanonicalName());
-
- @PersistenceContext(unitName = "VDCNet-ejbPU")
- private EntityManager em;
-
- @EJB
- DataverseServiceBean dataverseService;
-
- @EJB
- DOIDataCiteServiceBean doiDataCiteServiceBean;
-
-
- //A singleton since it, and the httpClient in it can be reused.
- private DataCiteRESTfullClient client=null;
-
- private DataCiteRESTfullClient getClient() throws IOException {
- if (client == null) {
- client = new DataCiteRESTfullClient(
- JvmSettings.DATACITE_MDS_API_URL.lookup(),
- JvmSettings.DATACITE_USERNAME.lookup(),
- JvmSettings.DATACITE_PASSWORD.lookup()
- );
- }
- return client;
- }
-
- /**
- * This method is deprecated and unused. We switched away from this method
- * when adjusting the code to reserve DOIs from DataCite on dataset create.
- *
- * Note that the DOIDataCiteRegisterCache entity/table used in this method
- * might be a candidate for deprecation as well. Removing it would require
- * some refactoring as it is used throughout the DataCite code.
- */
- @Deprecated
- public String createIdentifierLocal(String identifier, Map metadata, DvObject dvObject) {
-
- String xmlMetadata = getMetadataFromDvObject(identifier, metadata, dvObject);
- String status = metadata.get("_status").trim();
- String target = metadata.get("_target");
- String retString = "";
- DOIDataCiteRegisterCache rc = findByDOI(identifier);
- if (rc == null) {
- rc = new DOIDataCiteRegisterCache();
- rc.setDoi(identifier);
- rc.setXml(xmlMetadata);
- rc.setStatus("reserved");
- rc.setUrl(target);
- em.persist(rc);
- } else {
- rc.setDoi(identifier);
- rc.setXml(xmlMetadata);
- rc.setStatus("reserved");
- rc.setUrl(target);
- }
- retString = "success to reserved " + identifier;
-
- return retString;
- }
-
- /**
- * This "reserveIdentifier" method is heavily based on the
- * "registerIdentifier" method below but doesn't, this one doesn't doesn't
- * register a URL, which causes the "state" of DOI to transition from
- * "draft" to "findable". Here are some DataCite docs on the matter:
- *
- * "DOIs can exist in three states: draft, registered, and findable. DOIs
- * are in the draft state when metadata have been registered, and will
- * transition to the findable state when registering a URL." --
- * https://support.datacite.org/docs/mds-api-guide#doi-states
- */
- public String reserveIdentifier(String identifier, Map metadata, DvObject dvObject) throws IOException {
- String retString = "";
- String xmlMetadata = getMetadataFromDvObject(identifier, metadata, dvObject);
- DOIDataCiteRegisterCache rc = findByDOI(identifier);
- String target = metadata.get("_target");
- if (rc != null) {
- rc.setDoi(identifier);
- rc.setXml(xmlMetadata);
- // DataCite uses the term "draft" instead of "reserved".
- rc.setStatus("reserved");
- if (target == null || target.trim().length() == 0) {
- target = rc.getUrl();
- } else {
- rc.setUrl(target);
- }
- }
-
- DataCiteRESTfullClient client = getClient();
- retString = client.postMetadata(xmlMetadata);
-
- return retString;
- }
-
- public String registerIdentifier(String identifier, Map metadata, DvObject dvObject) throws IOException {
- String retString = "";
- String xmlMetadata = getMetadataFromDvObject(identifier, metadata, dvObject);
- DOIDataCiteRegisterCache rc = findByDOI(identifier);
- String target = metadata.get("_target");
- if (rc != null) {
- rc.setDoi(identifier);
- rc.setXml(xmlMetadata);
- rc.setStatus("public");
- if (target == null || target.trim().length() == 0) {
- target = rc.getUrl();
- } else {
- rc.setUrl(target);
- }
- }
-
- DataCiteRESTfullClient client = getClient();
- retString = client.postMetadata(xmlMetadata);
- client.postUrl(identifier.substring(identifier.indexOf(":") + 1), target);
-
- return retString;
- }
-
- public String deactivateIdentifier(String identifier, Map metadata, DvObject dvObject) throws IOException {
- String retString = "";
-
- String metadataString = getMetadataForDeactivateIdentifier(identifier, metadata, dvObject);
- retString = client.postMetadata(metadataString);
- retString = client.inactiveDataset(identifier.substring(identifier.indexOf(":") + 1));
-
- return retString;
- }
-
- public static String getMetadataFromDvObject(String identifier, Map metadata, DvObject dvObject) {
-
- Dataset dataset = null;
-
- if (dvObject instanceof Dataset) {
- dataset = (Dataset) dvObject;
- } else {
- dataset = (Dataset) dvObject.getOwner();
- }
-
- DataCiteMetadataTemplate metadataTemplate = new DataCiteMetadataTemplate();
- metadataTemplate.setIdentifier(identifier.substring(identifier.indexOf(':') + 1));
- metadataTemplate.setCreators(Util.getListFromStr(metadata.get("datacite.creator")));
- metadataTemplate.setAuthors(dataset.getLatestVersion().getDatasetAuthors());
- if (dvObject.isInstanceofDataset()) {
- //While getDescriptionPlainText strips < and > from HTML, it leaves '&' (at least so we need to xml escape as well
- String description = StringEscapeUtils.escapeXml10(dataset.getLatestVersion().getDescriptionPlainText());
- if (description.isEmpty() || description.equals(DatasetField.NA_VALUE)) {
- description = AbstractGlobalIdServiceBean.UNAVAILABLE;
- }
- metadataTemplate.setDescription(description);
- }
- if (dvObject.isInstanceofDataFile()) {
- DataFile df = (DataFile) dvObject;
- //Note: File metadata is not escaped like dataset metadata is, so adding an xml escape here.
- //This could/should be removed if the datafile methods add escaping
- String fileDescription = StringEscapeUtils.escapeXml10(df.getDescription());
- metadataTemplate.setDescription(fileDescription == null ? AbstractGlobalIdServiceBean.UNAVAILABLE : fileDescription);
- String datasetPid = df.getOwner().getGlobalId().asString();
- metadataTemplate.setDatasetIdentifier(datasetPid);
- } else {
- metadataTemplate.setDatasetIdentifier("");
- }
-
- metadataTemplate.setContacts(dataset.getLatestVersion().getDatasetContacts());
- metadataTemplate.setProducers(dataset.getLatestVersion().getDatasetProducers());
- String title = dvObject.getCurrentName();
- if(dvObject.isInstanceofDataFile()) {
- //Note file title is not currently escaped the way the dataset title is, so adding it here.
- title = StringEscapeUtils.escapeXml10(title);
- }
-
- if (title.isEmpty() || title.equals(DatasetField.NA_VALUE)) {
- title = AbstractGlobalIdServiceBean.UNAVAILABLE;
- }
-
- metadataTemplate.setTitle(title);
- String producerString = BrandingUtil.getRootDataverseCollectionName();
- if (producerString.isEmpty() || producerString.equals(DatasetField.NA_VALUE)) {
- producerString = AbstractGlobalIdServiceBean.UNAVAILABLE;
- }
- metadataTemplate.setPublisher(producerString);
- metadataTemplate.setPublisherYear(metadata.get("datacite.publicationyear"));
-
- String xmlMetadata = metadataTemplate.generateXML(dvObject);
- logger.log(Level.FINE, "XML to send to DataCite: {0}", xmlMetadata);
- return xmlMetadata;
- }
-
- public static String getMetadataForDeactivateIdentifier(String identifier, Map metadata, DvObject dvObject) {
-
- DataCiteMetadataTemplate metadataTemplate = new DataCiteMetadataTemplate();
- metadataTemplate.setIdentifier(identifier.substring(identifier.indexOf(':') + 1));
- metadataTemplate.setCreators(Util.getListFromStr(metadata.get("datacite.creator")));
-
- metadataTemplate.setDescription(AbstractGlobalIdServiceBean.UNAVAILABLE);
-
- String title =metadata.get("datacite.title");
-
- System.out.print("Map metadata title: "+ metadata.get("datacite.title"));
-
- metadataTemplate.setAuthors(null);
-
- metadataTemplate.setTitle(title);
- String producerString = AbstractGlobalIdServiceBean.UNAVAILABLE;
-
- metadataTemplate.setPublisher(producerString);
- metadataTemplate.setPublisherYear(metadata.get("datacite.publicationyear"));
-
- String xmlMetadata = metadataTemplate.generateXML(dvObject);
- logger.log(Level.FINE, "XML to send to DataCite: {0}", xmlMetadata);
- return xmlMetadata;
- }
-
- public String modifyIdentifier(String identifier, HashMap metadata, DvObject dvObject) throws IOException {
-
- String xmlMetadata = getMetadataFromDvObject(identifier, metadata, dvObject);
-
- logger.fine("XML to send to DataCite: " + xmlMetadata);
-
- String status = metadata.get("_status").trim();
- String target = metadata.get("_target");
- String retString = "";
- if (status.equals("reserved")) {
- DOIDataCiteRegisterCache rc = findByDOI(identifier);
- if (rc == null) {
- rc = new DOIDataCiteRegisterCache();
- rc.setDoi(identifier);
- rc.setXml(xmlMetadata);
- rc.setStatus("reserved");
- rc.setUrl(target);
- em.persist(rc);
- } else {
- rc.setDoi(identifier);
- rc.setXml(xmlMetadata);
- rc.setStatus("reserved");
- rc.setUrl(target);
- }
- retString = "success to reserved " + identifier;
- } else if (status.equals("public")) {
- DOIDataCiteRegisterCache rc = findByDOI(identifier);
- if (rc != null) {
- rc.setDoi(identifier);
- rc.setXml(xmlMetadata);
- rc.setStatus("public");
- if (target == null || target.trim().length() == 0) {
- target = rc.getUrl();
- } else {
- rc.setUrl(target);
- }
- try {
- DataCiteRESTfullClient client = getClient();
- retString = client.postMetadata(xmlMetadata);
- client.postUrl(identifier.substring(identifier.indexOf(":") + 1), target);
-
- } catch (UnsupportedEncodingException ex) {
- logger.log(Level.SEVERE, null, ex);
-
- } catch (RuntimeException rte) {
- logger.log(Level.SEVERE, "Error creating DOI at DataCite: {0}", rte.getMessage());
- logger.log(Level.SEVERE, "Exception", rte);
-
- }
- }
- } else if (status.equals("unavailable")) {
- DOIDataCiteRegisterCache rc = findByDOI(identifier);
- try {
- DataCiteRESTfullClient client = getClient();
- if (rc != null) {
- rc.setStatus("unavailable");
- retString = client.inactiveDataset(identifier.substring(identifier.indexOf(":") + 1));
- }
- } catch (IOException io) {
-
- }
- }
- return retString;
- }
-
- public boolean testDOIExists(String identifier) {
- boolean doiExists;
- try {
- DataCiteRESTfullClient client = getClient();
- doiExists = client.testDOIExists(identifier.substring(identifier.indexOf(":") + 1));
- } catch (Exception e) {
- logger.log(Level.INFO, identifier, e);
- return false;
- }
- return doiExists;
- }
-
- public HashMap getMetadata(String identifier) throws IOException {
- HashMap metadata = new HashMap<>();
- try {
- DataCiteRESTfullClient client = getClient();
- String xmlMetadata = client.getMetadata(identifier.substring(identifier.indexOf(":") + 1));
- DOIDataCiteServiceBean.GlobalIdMetadataTemplate template = doiDataCiteServiceBean.new GlobalIdMetadataTemplate(xmlMetadata);
- metadata.put("datacite.creator", Util.getStrFromList(template.getCreators()));
- metadata.put("datacite.title", template.getTitle());
- metadata.put("datacite.publisher", template.getPublisher());
- metadata.put("datacite.publicationyear", template.getPublisherYear());
- DOIDataCiteRegisterCache rc = findByDOI(identifier);
- if (rc != null) {
- metadata.put("_status", rc.getStatus());
- } else {
- metadata.put("_status", "public");
- }
- } catch (RuntimeException e) {
- logger.log(Level.INFO, identifier, e);
- }
- return metadata;
- }
-
- public DOIDataCiteRegisterCache findByDOI(String doi) {
- TypedQuery query = em.createNamedQuery("DOIDataCiteRegisterCache.findByDoi",
- DOIDataCiteRegisterCache.class);
- query.setParameter("doi", doi);
- List rc = query.getResultList();
- if (rc.size() == 1) {
- return rc.get(0);
- }
- return null;
- }
-
- public void deleteIdentifier(String identifier) {
- DOIDataCiteRegisterCache rc = findByDOI(identifier);
- if (rc != null) {
- em.remove(rc);
- }
- }
-
-}
-
-class DataCiteMetadataTemplate {
-
- private static final Logger logger = Logger.getLogger("edu.harvard.iq.dataverse.DataCiteMetadataTemplate");
- private static String template;
-
- static {
- try (InputStream in = DataCiteMetadataTemplate.class.getResourceAsStream("datacite_metadata_template.xml")) {
- template = Util.readAndClose(in, "utf-8");
- } catch (Exception e) {
- logger.log(Level.SEVERE, "datacite metadata template load error");
- logger.log(Level.SEVERE, "String " + e.toString());
- logger.log(Level.SEVERE, "localized message " + e.getLocalizedMessage());
- logger.log(Level.SEVERE, "cause " + e.getCause());
- logger.log(Level.SEVERE, "message " + e.getMessage());
- }
- }
-
- private String xmlMetadata;
- private String identifier;
- private String datasetIdentifier;
- private List datafileIdentifiers;
- private List creators;
- private String title;
- private String publisher;
- private String publisherYear;
- private List authors;
- private String description;
- private List contacts;
- private List producers;
-
- public List getProducers() {
- return producers;
- }
-
- public void setProducers(List producers) {
- this.producers = producers;
- }
-
- public List getContacts() {
- return contacts;
- }
-
- public void setContacts(List contacts) {
- this.contacts = contacts;
- }
-
- public String getDescription() {
- return description;
- }
-
- public void setDescription(String description) {
- this.description = description;
- }
-
- public List getAuthors() {
- return authors;
- }
-
- public void setAuthors(List authors) {
- this.authors = authors;
- }
-
- public DataCiteMetadataTemplate() {
- }
-
- public List getDatafileIdentifiers() {
- return datafileIdentifiers;
- }
-
- public void setDatafileIdentifiers(List datafileIdentifiers) {
- this.datafileIdentifiers = datafileIdentifiers;
- }
-
- public DataCiteMetadataTemplate(String xmlMetaData) {
- this.xmlMetadata = xmlMetaData;
- Document doc = Jsoup.parseBodyFragment(xmlMetaData);
- Elements identifierElements = doc.select("identifier");
- if (identifierElements.size() > 0) {
- identifier = identifierElements.get(0).html();
- }
- Elements creatorElements = doc.select("creatorName");
- creators = new ArrayList<>();
- for (Element creatorElement : creatorElements) {
- creators.add(creatorElement.html());
- }
- Elements titleElements = doc.select("title");
- if (titleElements.size() > 0) {
- title = titleElements.get(0).html();
- }
- Elements publisherElements = doc.select("publisher");
- if (publisherElements.size() > 0) {
- publisher = publisherElements.get(0).html();
- }
- Elements publisherYearElements = doc.select("publicationYear");
- if (publisherYearElements.size() > 0) {
- publisherYear = publisherYearElements.get(0).html();
- }
- }
-
- public String generateXML(DvObject dvObject) {
- // Can't use "UNKNOWN" here because DataCite will respond with "[facet 'pattern'] the value 'unknown' is not accepted by the pattern '[\d]{4}'"
- String publisherYearFinal = "9999";
- // FIXME: Investigate why this.publisherYear is sometimes null now that pull request #4606 has been merged.
- if (this.publisherYear != null) {
- // Added to prevent a NullPointerException when trying to destroy datasets when using DataCite rather than EZID.
- publisherYearFinal = this.publisherYear;
- }
- xmlMetadata = template.replace("${identifier}", this.identifier.trim())
- .replace("${title}", this.title)
- .replace("${publisher}", this.publisher)
- .replace("${publisherYear}", publisherYearFinal)
- .replace("${description}", this.description);
-
- StringBuilder creatorsElement = new StringBuilder();
- if (authors!= null && !authors.isEmpty()) {
- for (DatasetAuthor author : authors) {
- creatorsElement.append("");
- creatorsElement.append(author.getName().getDisplayValue());
- creatorsElement.append("");
-
- if (author.getIdType() != null && author.getIdValue() != null && !author.getIdType().isEmpty() && !author.getIdValue().isEmpty() && author.getAffiliation() != null && !author.getAffiliation().getDisplayValue().isEmpty()) {
-
- if (author.getIdType().equals("ORCID")) {
- creatorsElement.append("" + author.getIdValue() + "");
- }
- if (author.getIdType().equals("ISNI")) {
- creatorsElement.append("" + author.getIdValue() + "");
- }
- if (author.getIdType().equals("LCNA")) {
- creatorsElement.append("" + author.getIdValue() + "");
- }
- }
- if (author.getAffiliation() != null && !author.getAffiliation().getDisplayValue().isEmpty()) {
- creatorsElement.append("" + author.getAffiliation().getDisplayValue() + "");
- }
- creatorsElement.append("");
- }
-
- } else {
- creatorsElement.append("").append(AbstractGlobalIdServiceBean.UNAVAILABLE).append("");
- }
-
- xmlMetadata = xmlMetadata.replace("${creators}", creatorsElement.toString());
-
- StringBuilder contributorsElement = new StringBuilder();
- if (this.getContacts() != null) {
- for (String[] contact : this.getContacts()) {
- if (!contact[0].isEmpty()) {
- contributorsElement.append("" + contact[0] + "");
- if (!contact[1].isEmpty()) {
- contributorsElement.append("" + contact[1] + "");
- }
- contributorsElement.append("");
- }
- }
- }
-
- if (this.getProducers() != null) {
- for (String[] producer : this.getProducers()) {
- contributorsElement.append("" + producer[0] + "");
- if (!producer[1].isEmpty()) {
- contributorsElement.append("" + producer[1] + "");
- }
- contributorsElement.append("");
- }
- }
-
- String relIdentifiers = generateRelatedIdentifiers(dvObject);
-
- xmlMetadata = xmlMetadata.replace("${relatedIdentifiers}", relIdentifiers);
-
- xmlMetadata = xmlMetadata.replace("{$contributors}", contributorsElement.toString());
- return xmlMetadata;
- }
-
- private String generateRelatedIdentifiers(DvObject dvObject) {
-
- StringBuilder sb = new StringBuilder();
- if (dvObject.isInstanceofDataset()) {
- Dataset dataset = (Dataset) dvObject;
- if (!dataset.getFiles().isEmpty() && !(dataset.getFiles().get(0).getIdentifier() == null)) {
-
- datafileIdentifiers = new ArrayList<>();
- for (DataFile dataFile : dataset.getFiles()) {
- if (dataFile.getGlobalId() != null) {
- if (sb.toString().isEmpty()) {
- sb.append("");
- }
- sb.append("" + dataFile.getGlobalId() + "");
- }
- }
-
- if (!sb.toString().isEmpty()) {
- sb.append("");
- }
- }
- } else if (dvObject.isInstanceofDataFile()) {
- DataFile df = (DataFile) dvObject;
- sb.append("");
- sb.append("" + df.getOwner().getGlobalId() + "");
- sb.append("");
- }
- return sb.toString();
- }
-
- public void generateFileIdentifiers(DvObject dvObject) {
-
- if (dvObject.isInstanceofDataset()) {
- Dataset dataset = (Dataset) dvObject;
-
- if (!dataset.getFiles().isEmpty() && !(dataset.getFiles().get(0).getIdentifier() == null)) {
-
- datafileIdentifiers = new ArrayList<>();
- for (DataFile dataFile : dataset.getFiles()) {
- datafileIdentifiers.add(dataFile.getIdentifier());
- int x = xmlMetadata.indexOf("") - 1;
- xmlMetadata = xmlMetadata.replace("{relatedIdentifier}", dataFile.getIdentifier());
- xmlMetadata = xmlMetadata.substring(0, x) + "${relatedIdentifier}" + template.substring(x, template.length() - 1);
-
- }
-
- } else {
- xmlMetadata = xmlMetadata.replace("${relatedIdentifier}", "");
- }
- }
- }
-
- public static String getTemplate() {
- return template;
- }
-
- public static void setTemplate(String template) {
- DataCiteMetadataTemplate.template = template;
- }
-
- public String getIdentifier() {
- return identifier;
- }
-
- public void setIdentifier(String identifier) {
- this.identifier = identifier;
- }
-
- public void setDatasetIdentifier(String datasetIdentifier) {
- this.datasetIdentifier = datasetIdentifier;
- }
-
- public List getCreators() {
- return creators;
- }
-
- public void setCreators(List creators) {
- this.creators = creators;
- }
-
- public String getTitle() {
- return title;
- }
-
- public void setTitle(String title) {
- this.title = title;
- }
-
- public String getPublisher() {
- return publisher;
- }
-
- public void setPublisher(String publisher) {
- this.publisher = publisher;
- }
-
- public String getPublisherYear() {
- return publisherYear;
- }
-
- public void setPublisherYear(String publisherYear) {
- this.publisherYear = publisherYear;
- }
-}
-
-class Util {
-
- public static void close(InputStream in) {
- if (in != null) {
- try {
- in.close();
- } catch (IOException e) {
- throw new RuntimeException("Fail to close InputStream");
- }
- }
- }
-
- public static String readAndClose(InputStream inStream, String encoding) {
- ByteArrayOutputStream outStream = new ByteArrayOutputStream();
- byte[] buf = new byte[128];
- String data;
- try {
- int cnt;
- while ((cnt = inStream.read(buf)) >= 0) {
- outStream.write(buf, 0, cnt);
- }
- data = outStream.toString(encoding);
- } catch (IOException ioe) {
- throw new RuntimeException("IOException");
- } finally {
- close(inStream);
- }
- return data;
- }
-
- public static List getListFromStr(String str) {
- return Arrays.asList(str.split("; "));
-// List authors = new ArrayList();
-// int preIdx = 0;
-// for(int i=0;i authors) {
- StringBuilder str = new StringBuilder();
- for (String author : authors) {
- if (str.length() > 0) {
- str.append("; ");
- }
- str.append(author);
- }
- return str.toString();
- }
-
-}
diff --git a/src/main/java/edu/harvard/iq/dataverse/DOIDataCiteServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DOIDataCiteServiceBean.java
deleted file mode 100644
index 48786b41824..00000000000
--- a/src/main/java/edu/harvard/iq/dataverse/DOIDataCiteServiceBean.java
+++ /dev/null
@@ -1,248 +0,0 @@
-package edu.harvard.iq.dataverse;
-
-import java.io.IOException;
-import java.net.HttpURLConnection;
-import java.net.URL;
-import java.util.Base64;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-import java.util.logging.Level;
-import java.util.logging.Logger;
-
-import jakarta.ejb.EJB;
-import jakarta.ejb.Stateless;
-
-import edu.harvard.iq.dataverse.settings.JvmSettings;
-import org.apache.commons.httpclient.HttpException;
-import org.apache.commons.httpclient.HttpStatus;
-
-
-/**
- *
- * @author luopc
- */
-@Stateless
-public class DOIDataCiteServiceBean extends DOIServiceBean {
-
- private static final Logger logger = Logger.getLogger(DOIDataCiteServiceBean.class.getCanonicalName());
-
- private static final String PUBLIC = "public";
- private static final String FINDABLE = "findable";
- private static final String RESERVED = "reserved";
- private static final String DRAFT = "draft";
-
- @EJB
- DOIDataCiteRegisterService doiDataCiteRegisterService;
-
- @Override
- public boolean registerWhenPublished() {
- return false;
- }
-
-
-
- @Override
- public boolean alreadyRegistered(GlobalId pid, boolean noProviderDefault) {
- logger.log(Level.FINE,"alreadyRegistered");
- if(pid==null || pid.asString().isEmpty()) {
- logger.fine("No identifier sent.");
- return false;
- }
- boolean alreadyRegistered;
- String identifier = pid.asString();
- try{
- alreadyRegistered = doiDataCiteRegisterService.testDOIExists(identifier);
- } catch (Exception e){
- logger.log(Level.WARNING, "alreadyRegistered failed");
- return false;
- }
- return alreadyRegistered;
- }
-
- @Override
- public String createIdentifier(DvObject dvObject) throws Exception {
- logger.log(Level.FINE,"createIdentifier");
- if(dvObject.getIdentifier() == null || dvObject.getIdentifier().isEmpty() ){
- dvObject = generateIdentifier(dvObject);
- }
- String identifier = getIdentifier(dvObject);
- Map metadata = getMetadataForCreateIndicator(dvObject);
- metadata.put("_status", "reserved");
- try {
- String retString = doiDataCiteRegisterService.reserveIdentifier(identifier, metadata, dvObject);
- logger.log(Level.FINE, "create DOI identifier retString : " + retString);
- return retString;
- } catch (Exception e) {
- logger.log(Level.WARNING, "Identifier not created: create failed", e);
- throw e;
- }
- }
-
- @Override
- public Map getIdentifierMetadata(DvObject dvObject) {
- logger.log(Level.FINE,"getIdentifierMetadata");
- String identifier = getIdentifier(dvObject);
- Map metadata = new HashMap<>();
- try {
- metadata = doiDataCiteRegisterService.getMetadata(identifier);
- } catch (Exception e) {
- logger.log(Level.WARNING, "getIdentifierMetadata failed", e);
- }
- return metadata;
- }
-
-
- /**
- * Modifies the DOI metadata for a Dataset
- * @param dvObject the dvObject whose metadata needs to be modified
- * @return the Dataset identifier, or null if the modification failed
- * @throws java.lang.Exception
- */
- @Override
- public String modifyIdentifierTargetURL(DvObject dvObject) throws Exception {
- logger.log(Level.FINE,"modifyIdentifier");
- String identifier = getIdentifier(dvObject);
- try {
- HashMap metadata = doiDataCiteRegisterService.getMetadata(identifier);
- doiDataCiteRegisterService.modifyIdentifier(identifier, metadata, dvObject);
- } catch (Exception e) {
- logger.log(Level.WARNING, "modifyMetadata failed", e);
- throw e;
- }
- return identifier;
- }
-
- public void deleteRecordFromCache(Dataset datasetIn){
- logger.log(Level.FINE,"deleteRecordFromCache");
- String identifier = getIdentifier(datasetIn);
- HashMap doiMetadata = new HashMap();
- try {
- doiMetadata = doiDataCiteRegisterService.getMetadata(identifier);
- } catch (Exception e) {
- logger.log(Level.WARNING, "get matadata failed cannot delete");
- logger.log(Level.WARNING, "String {0}", e.toString());
- logger.log(Level.WARNING, "localized message {0}", e.getLocalizedMessage());
- logger.log(Level.WARNING, "cause", e.getCause());
- logger.log(Level.WARNING, "message {0}", e.getMessage());
- }
-
- String idStatus = (String) doiMetadata.get("_status");
-
- if (idStatus == null || idStatus.equals("reserved")) {
- logger.log(Level.WARNING, "Delete status is reserved..");
- try {
- doiDataCiteRegisterService.deleteIdentifier(identifier);
- } catch (Exception e) {
- logger.log(Level.WARNING, "delete failed");
- logger.log(Level.WARNING, "String {0}", e.toString());
- logger.log(Level.WARNING, "localized message {0}", e.getLocalizedMessage());
- logger.log(Level.WARNING, "cause", e.getCause());
- logger.log(Level.WARNING, "message {0}", e.getMessage());
- throw new RuntimeException(e);
- }
- }
- }
-
- /*
- * Deletes a DOI if it is in DRAFT/RESERVED state or removes metadata and changes it from PUBLIC/FINDABLE to REGISTERED.
- */
- @Override
- public void deleteIdentifier(DvObject dvObject) throws IOException, HttpException {
- logger.log(Level.FINE,"deleteIdentifier");
- String identifier = getIdentifier(dvObject);
- //ToDo - PidUtils currently has a DataCite API call that would get the status at DataCite for this identifier - that could be more accurate than assuming based on whether the dvObject has been published
- String idStatus = DRAFT;
- if(dvObject.isReleased()) {
- idStatus = PUBLIC;
- }
- if ( idStatus != null ) {
- switch ( idStatus ) {
- case RESERVED:
- case DRAFT:
- logger.log(Level.INFO, "Delete status is reserved..");
- //service only removes the identifier from the cache (since it was written before DOIs could be registered in draft state)
- doiDataCiteRegisterService.deleteIdentifier(identifier);
- //So we call the deleteDraftIdentifier method below until things are refactored
- deleteDraftIdentifier(dvObject);
- break;
-
- case PUBLIC:
- case FINDABLE:
- //if public then it has been released set to unavailable and reset target to n2t url
- Map metadata = addDOIMetadataForDestroyedDataset(dvObject);
- metadata.put("_status", "registered");
- metadata.put("_target", getTargetUrl(dvObject));
- doiDataCiteRegisterService.deactivateIdentifier(identifier, metadata, dvObject);
- break;
- }
- }
- }
-
- /**
- * Deletes DOI from the DataCite side, if possible. Only "draft" DOIs can be
- * deleted.
- */
- private void deleteDraftIdentifier(DvObject dvObject) throws IOException {
-
- //ToDo - incorporate into DataCiteRESTfulClient
- String baseUrl = JvmSettings.DATACITE_REST_API_URL.lookup();
- String username = JvmSettings.DATACITE_USERNAME.lookup();
- String password = JvmSettings.DATACITE_PASSWORD.lookup();
- GlobalId doi = dvObject.getGlobalId();
- /**
- * Deletes the DOI from DataCite if it can. Returns 204 if PID was deleted
- * (only possible for "draft" DOIs), 405 (method not allowed) if the DOI
- * wasn't deleted (because it's in "findable" state, for example, 404 if the
- * DOI wasn't found, and possibly other status codes such as 500 if DataCite
- * is down.
- */
-
- URL url = new URL(baseUrl + "/dois/" + doi.getAuthority() + "/" + doi.getIdentifier());
- HttpURLConnection connection = null;
- connection = (HttpURLConnection) url.openConnection();
- connection.setRequestMethod("DELETE");
- String userpass = username + ":" + password;
- String basicAuth = "Basic " + new String(Base64.getEncoder().encode(userpass.getBytes()));
- connection.setRequestProperty("Authorization", basicAuth);
- int status = connection.getResponseCode();
- if(status!=HttpStatus.SC_NO_CONTENT) {
- logger.warning("Incorrect Response Status from DataCite: " + status + " : " + connection.getResponseMessage());
- throw new HttpException("Status: " + status);
- }
- logger.fine("deleteDoi status for " + doi.asString() + ": " + status);
- }
-
- @Override
- public boolean publicizeIdentifier(DvObject dvObject) {
- logger.log(Level.FINE,"updateIdentifierStatus");
- if(dvObject.getIdentifier() == null || dvObject.getIdentifier().isEmpty() ){
- dvObject = generateIdentifier(dvObject);
- }
- String identifier = getIdentifier(dvObject);
- Map metadata = getUpdateMetadata(dvObject);
- metadata.put("_status", PUBLIC);
- metadata.put("datacite.publicationyear", generateYear(dvObject));
- metadata.put("_target", getTargetUrl(dvObject));
- try {
- doiDataCiteRegisterService.registerIdentifier(identifier, metadata, dvObject);
- return true;
- } catch (Exception e) {
- logger.log(Level.WARNING, "modifyMetadata failed: " + e.getMessage(), e);
- return false;
- }
- }
-
-
- @Override
- public List getProviderInformation(){
- return List.of("DataCite", "https://status.datacite.org");
- }
-
-
-
- @Override
- protected String getProviderKeyName() {
- return "DataCite";
- }
-}
diff --git a/src/main/java/edu/harvard/iq/dataverse/DOIServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DOIServiceBean.java
deleted file mode 100644
index 0182c745cd0..00000000000
--- a/src/main/java/edu/harvard/iq/dataverse/DOIServiceBean.java
+++ /dev/null
@@ -1,78 +0,0 @@
-package edu.harvard.iq.dataverse;
-
-import edu.harvard.iq.dataverse.settings.SettingsServiceBean.Key;
-
-public abstract class DOIServiceBean extends AbstractGlobalIdServiceBean {
-
- public static final String DOI_PROTOCOL = "doi";
- public static final String DOI_RESOLVER_URL = "https://doi.org/";
- public static final String HTTP_DOI_RESOLVER_URL = "http://doi.org/";
- public static final String DXDOI_RESOLVER_URL = "https://dx.doi.org/";
- public static final String HTTP_DXDOI_RESOLVER_URL = "http://dx.doi.org/";
-
- public DOIServiceBean() {
- super();
- }
-
- @Override
- public GlobalId parsePersistentId(String pidString) {
- if (pidString.startsWith(DOI_RESOLVER_URL)) {
- pidString = pidString.replace(DOI_RESOLVER_URL,
- (DOI_PROTOCOL + ":"));
- } else if (pidString.startsWith(HTTP_DOI_RESOLVER_URL)) {
- pidString = pidString.replace(HTTP_DOI_RESOLVER_URL,
- (DOI_PROTOCOL + ":"));
- } else if (pidString.startsWith(DXDOI_RESOLVER_URL)) {
- pidString = pidString.replace(DXDOI_RESOLVER_URL,
- (DOI_PROTOCOL + ":"));
- }
- return super.parsePersistentId(pidString);
- }
-
- @Override
- public GlobalId parsePersistentId(String protocol, String identifierString) {
-
- if (!DOI_PROTOCOL.equals(protocol)) {
- return null;
- }
- GlobalId globalId = super.parsePersistentId(protocol, identifierString);
- if (globalId!=null && !GlobalIdServiceBean.checkDOIAuthority(globalId.getAuthority())) {
- return null;
- }
- return globalId;
- }
-
- @Override
- public GlobalId parsePersistentId(String protocol, String authority, String identifier) {
-
- if (!DOI_PROTOCOL.equals(protocol)) {
- return null;
- }
- return super.parsePersistentId(protocol, authority, identifier);
- }
-
- public String getUrlPrefix() {
- return DOI_RESOLVER_URL;
- }
-
- @Override
- public boolean isConfigured() {
- if (configured == null) {
- if (getProviderKeyName() == null) {
- configured = false;
- } else {
- String doiProvider = settingsService.getValueForKey(Key.DoiProvider, "");
- if (getProviderKeyName().equals(doiProvider)) {
- configured = true;
- } else if (!doiProvider.isEmpty()) {
- configured = false;
- }
- }
- }
- return super.isConfigured();
- }
-
- protected String getProviderKeyName() {
- return null;
- }
-}
\ No newline at end of file
diff --git a/src/main/java/edu/harvard/iq/dataverse/DataCitation.java b/src/main/java/edu/harvard/iq/dataverse/DataCitation.java
index 9b4b89db44f..a012175deae 100644
--- a/src/main/java/edu/harvard/iq/dataverse/DataCitation.java
+++ b/src/main/java/edu/harvard/iq/dataverse/DataCitation.java
@@ -7,6 +7,7 @@
import edu.harvard.iq.dataverse.branding.BrandingUtil;
import edu.harvard.iq.dataverse.harvest.client.HarvestingClient;
+import edu.harvard.iq.dataverse.pidproviders.AbstractPidProvider;
import java.io.BufferedWriter;
import java.io.ByteArrayOutputStream;
@@ -635,12 +636,12 @@ public Map getDataCiteMetadata() {
String authorString = getAuthorsString();
if (authorString.isEmpty()) {
- authorString = AbstractGlobalIdServiceBean.UNAVAILABLE;
+ authorString = AbstractPidProvider.UNAVAILABLE;
}
String producerString = getPublisher();
if (producerString.isEmpty()) {
- producerString = AbstractGlobalIdServiceBean.UNAVAILABLE;
+ producerString = AbstractPidProvider.UNAVAILABLE;
}
metadata.put("datacite.creator", authorString);
diff --git a/src/main/java/edu/harvard/iq/dataverse/DataFile.java b/src/main/java/edu/harvard/iq/dataverse/DataFile.java
index 3d8086b142b..53cdff31cc2 100644
--- a/src/main/java/edu/harvard/iq/dataverse/DataFile.java
+++ b/src/main/java/edu/harvard/iq/dataverse/DataFile.java
@@ -545,61 +545,61 @@ public void setDescription(String description) {
fmd.setDescription(description);
}
}
+
+ public FileMetadata getDraftFileMetadata() {
+ FileMetadata latestFileMetadata = getLatestFileMetadata();
+ if (latestFileMetadata.getDatasetVersion().isDraft()) {
+ return latestFileMetadata;
+ }
+ return null;
+ }
public FileMetadata getFileMetadata() {
return getLatestFileMetadata();
}
-
+
public FileMetadata getLatestFileMetadata() {
- FileMetadata fmd = null;
+ FileMetadata resultFileMetadata = null;
- // for newly added or harvested, just return the one fmd
if (fileMetadatas.size() == 1) {
return fileMetadatas.get(0);
}
-
+
for (FileMetadata fileMetadata : fileMetadatas) {
- // if it finds a draft, return it
if (fileMetadata.getDatasetVersion().getVersionState().equals(VersionState.DRAFT)) {
return fileMetadata;
- }
-
- // otherwise return the one with the latest version number
- // duplicate logic in getLatestPublishedFileMetadata()
- if (fmd == null || fileMetadata.getDatasetVersion().getVersionNumber().compareTo( fmd.getDatasetVersion().getVersionNumber() ) > 0 ) {
- fmd = fileMetadata;
- } else if ((fileMetadata.getDatasetVersion().getVersionNumber().compareTo( fmd.getDatasetVersion().getVersionNumber())==0 )&&
- ( fileMetadata.getDatasetVersion().getMinorVersionNumber().compareTo( fmd.getDatasetVersion().getMinorVersionNumber()) > 0 ) ) {
- fmd = fileMetadata;
}
+ resultFileMetadata = getTheNewerFileMetadata(resultFileMetadata, fileMetadata);
}
- return fmd;
+
+ return resultFileMetadata;
}
-
-// //Returns null if no published version
+
public FileMetadata getLatestPublishedFileMetadata() throws UnsupportedOperationException {
- FileMetadata fmd = null;
-
- for (FileMetadata fileMetadata : fileMetadatas) {
- // if it finds a draft, skip
- if (fileMetadata.getDatasetVersion().getVersionState().equals(VersionState.DRAFT)) {
- continue;
- }
-
- // otherwise return the one with the latest version number
- // duplicate logic in getLatestFileMetadata()
- if (fmd == null || fileMetadata.getDatasetVersion().getVersionNumber().compareTo( fmd.getDatasetVersion().getVersionNumber() ) > 0 ) {
- fmd = fileMetadata;
- } else if ((fileMetadata.getDatasetVersion().getVersionNumber().compareTo( fmd.getDatasetVersion().getVersionNumber())==0 )&&
- ( fileMetadata.getDatasetVersion().getMinorVersionNumber().compareTo( fmd.getDatasetVersion().getMinorVersionNumber()) > 0 ) ) {
- fmd = fileMetadata;
- }
- }
- if(fmd == null) {
+ FileMetadata resultFileMetadata = fileMetadatas.stream()
+ .filter(metadata -> !metadata.getDatasetVersion().getVersionState().equals(VersionState.DRAFT))
+ .reduce(null, DataFile::getTheNewerFileMetadata);
+
+ if (resultFileMetadata == null) {
throw new UnsupportedOperationException("No published metadata version for DataFile " + this.getId());
}
- return fmd;
+ return resultFileMetadata;
+ }
+
+ public static FileMetadata getTheNewerFileMetadata(FileMetadata current, FileMetadata candidate) {
+ if (current == null) {
+ return candidate;
+ }
+
+ DatasetVersion currentVersion = current.getDatasetVersion();
+ DatasetVersion candidateVersion = candidate.getDatasetVersion();
+
+ if (DatasetVersion.compareByVersion.compare(candidateVersion, currentVersion) > 0) {
+ return candidate;
+ }
+
+ return current;
}
/**
@@ -610,7 +610,7 @@ public long getFilesize() {
if (this.filesize == null) {
// -1 means "unknown"
return -1;
- }
+ }
return this.filesize;
}
diff --git a/src/main/java/edu/harvard/iq/dataverse/DataFileServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DataFileServiceBean.java
index c9d50bbed9d..8ceb529a5d4 100644
--- a/src/main/java/edu/harvard/iq/dataverse/DataFileServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/DataFileServiceBean.java
@@ -1242,9 +1242,8 @@ public List selectFilesWithMissingOriginalSizes() {
* Check that a identifier entered by the user is unique (not currently used
* for any other study in this Dataverse Network). Also check for duplicate
* in the remote PID service if needed
- * @param userIdentifier
- * @param datafile
- * @param idServiceBean
+ * @param dataFileId
+ * @param storageLocation
* @return {@code true} iff the global identifier is unique.
*/
public void finalizeFileDelete(Long dataFileId, String storageLocation) throws IOException {
diff --git a/src/main/java/edu/harvard/iq/dataverse/DataTable.java b/src/main/java/edu/harvard/iq/dataverse/DataTable.java
index a17d8c65138..95f3aed0f40 100644
--- a/src/main/java/edu/harvard/iq/dataverse/DataTable.java
+++ b/src/main/java/edu/harvard/iq/dataverse/DataTable.java
@@ -112,6 +112,16 @@ public DataTable() {
@Column( nullable = true )
private String originalFileName;
+
+ /**
+ * The physical tab-delimited file is in storage with the list of variable
+ * names saved as the 1st line. This means that we do not need to generate
+ * this line on the fly. (Also means that direct download mechanism can be
+ * used for this file!)
+ */
+ @Column(nullable = false)
+ private boolean storedWithVariableHeader = false;
+
/*
* Getter and Setter methods:
*/
@@ -206,6 +216,14 @@ public void setOriginalFileName(String originalFileName) {
this.originalFileName = originalFileName;
}
+ public boolean isStoredWithVariableHeader() {
+ return storedWithVariableHeader;
+ }
+
+ public void setStoredWithVariableHeader(boolean storedWithVariableHeader) {
+ this.storedWithVariableHeader = storedWithVariableHeader;
+ }
+
/*
* Custom overrides for hashCode(), equals() and toString() methods:
*/
diff --git a/src/main/java/edu/harvard/iq/dataverse/Dataset.java b/src/main/java/edu/harvard/iq/dataverse/Dataset.java
index a2f560bc959..eaf406d01bf 100644
--- a/src/main/java/edu/harvard/iq/dataverse/Dataset.java
+++ b/src/main/java/edu/harvard/iq/dataverse/Dataset.java
@@ -317,6 +317,7 @@ public boolean isDeaccessioned() {
}
return hasDeaccessionedVersions; // since any published version would have already returned
}
+
public DatasetVersion getLatestVersion() {
return getVersions().get(0);
@@ -852,6 +853,23 @@ public String getRemoteArchiveURL() {
if (StringUtil.nonEmpty(this.getProtocol())
&& StringUtil.nonEmpty(this.getAuthority())
&& StringUtil.nonEmpty(this.getIdentifier())) {
+
+ // If there is a custom archival url for this Harvesting
+ // Source, we'll use that
+ String harvestingUrl = this.getHarvestedFrom().getHarvestingUrl();
+ String archivalUrl = this.getHarvestedFrom().getArchiveUrl();
+ if (!harvestingUrl.contains(archivalUrl)) {
+ // When a Harvesting Client is created, the "archive url" is set to
+ // just the host part of the OAI url automatically.
+ // For example, if the OAI url was "https://remote.edu/oai",
+ // the archive url will default to "https://remote.edu/".
+ // If this is no longer true, we know it means the admin
+ // went to the trouble of setting it to something else -
+ // so we should use this url for the redirects back to source,
+ // instead of the global id resolver.
+ return archivalUrl + this.getAuthority() + "/" + this.getIdentifier();
+ }
+ // ... if not, we'll redirect to the resolver for the global id:
return this.getPersistentURL();
}
diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetField.java b/src/main/java/edu/harvard/iq/dataverse/DatasetField.java
index c836a20893f..31e7758c7d5 100644
--- a/src/main/java/edu/harvard/iq/dataverse/DatasetField.java
+++ b/src/main/java/edu/harvard/iq/dataverse/DatasetField.java
@@ -595,7 +595,8 @@ public boolean removeBlankDatasetFieldValues() {
return true;
}
} else { // controlled vocab
- if (this.getControlledVocabularyValues().isEmpty()) {
+ // During harvesting, some CVVs are put in getDatasetFieldValues; we don't want to remove those
+ if (this.getControlledVocabularyValues().isEmpty() && this.getDatasetFieldValues().isEmpty()) {
return true;
}
}
diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetFieldConstant.java b/src/main/java/edu/harvard/iq/dataverse/DatasetFieldConstant.java
index 1621b80df55..22bad42df96 100644
--- a/src/main/java/edu/harvard/iq/dataverse/DatasetFieldConstant.java
+++ b/src/main/java/edu/harvard/iq/dataverse/DatasetFieldConstant.java
@@ -112,8 +112,8 @@ public class DatasetFieldConstant implements java.io.Serializable {
public final static String geographicUnit="geographicUnit";
public final static String westLongitude="westLongitude";
public final static String eastLongitude="eastLongitude";
- public final static String northLatitude="northLongitude"; //Changed to match DB - incorrectly entered into DB: https://github.com/IQSS/dataverse/issues/5645
- public final static String southLatitude="southLongitude"; //Incorrect in DB: https://github.com/IQSS/dataverse/issues/5645
+ public final static String northLatitude="northLatitude";
+ public final static String southLatitude="southLatitude";
public final static String unitOfAnalysis="unitOfAnalysis";
public final static String universe="universe";
public final static String kindOfData="kindOfData";
diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetFieldServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DatasetFieldServiceBean.java
index ce2b00086ec..6223cd83773 100644
--- a/src/main/java/edu/harvard/iq/dataverse/DatasetFieldServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/DatasetFieldServiceBean.java
@@ -19,6 +19,8 @@
import jakarta.ejb.EJB;
import jakarta.ejb.Stateless;
+import jakarta.ejb.TransactionAttribute;
+import jakarta.ejb.TransactionAttributeType;
import jakarta.inject.Named;
import jakarta.json.Json;
import jakarta.json.JsonArray;
@@ -34,6 +36,7 @@
import jakarta.persistence.NoResultException;
import jakarta.persistence.NonUniqueResultException;
import jakarta.persistence.PersistenceContext;
+import jakarta.persistence.PersistenceException;
import jakarta.persistence.TypedQuery;
import org.apache.commons.codec.digest.DigestUtils;
@@ -46,7 +49,6 @@
import org.apache.http.impl.client.HttpClients;
import org.apache.http.protocol.HttpContext;
import org.apache.http.util.EntityUtils;
-
import edu.harvard.iq.dataverse.settings.SettingsServiceBean;
/**
@@ -448,6 +450,7 @@ public JsonObject getExternalVocabularyValue(String termUri) {
* @param cvocEntry - the configuration for the DatasetFieldType associated with this term
* @param term - the term uri as a string
*/
+ @TransactionAttribute(TransactionAttributeType.REQUIRES_NEW)
public void registerExternalTerm(JsonObject cvocEntry, String term) {
String retrievalUri = cvocEntry.getString("retrieval-uri");
String prefix = cvocEntry.getString("prefix", null);
@@ -518,6 +521,8 @@ public void process(HttpResponse response, HttpContext context) throws HttpExcep
logger.fine("Wrote value for term: " + term);
} catch (JsonException je) {
logger.severe("Error retrieving: " + retrievalUri + " : " + je.getMessage());
+ } catch (PersistenceException e) {
+ logger.fine("Problem persisting: " + retrievalUri + " : " + e.getMessage());
}
} else {
logger.severe("Received response code : " + statusCode + " when retrieving " + retrievalUri
diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetFieldType.java b/src/main/java/edu/harvard/iq/dataverse/DatasetFieldType.java
index 824b486a42d..01785359e0e 100644
--- a/src/main/java/edu/harvard/iq/dataverse/DatasetFieldType.java
+++ b/src/main/java/edu/harvard/iq/dataverse/DatasetFieldType.java
@@ -284,7 +284,7 @@ public void setDisplayOnCreate(boolean displayOnCreate) {
}
public boolean isControlledVocabulary() {
- return controlledVocabularyValues != null && !controlledVocabularyValues.isEmpty();
+ return allowControlledVocabulary;
}
/**
diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java b/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java
index b79f387f20b..4c436715f0d 100644
--- a/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java
+++ b/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java
@@ -11,7 +11,6 @@
import edu.harvard.iq.dataverse.authorization.users.User;
import edu.harvard.iq.dataverse.branding.BrandingUtil;
import edu.harvard.iq.dataverse.dataaccess.StorageIO;
-import edu.harvard.iq.dataverse.dataaccess.AbstractRemoteOverlayAccessIO;
import edu.harvard.iq.dataverse.dataaccess.DataAccess;
import edu.harvard.iq.dataverse.dataaccess.GlobusAccessibleStore;
import edu.harvard.iq.dataverse.dataaccess.ImageThumbConverter;
@@ -25,6 +24,7 @@
import edu.harvard.iq.dataverse.engine.command.Command;
import edu.harvard.iq.dataverse.engine.command.CommandContext;
import edu.harvard.iq.dataverse.engine.command.exception.CommandException;
+import edu.harvard.iq.dataverse.engine.command.impl.CheckRateLimitForDatasetPageCommand;
import edu.harvard.iq.dataverse.engine.command.impl.CreatePrivateUrlCommand;
import edu.harvard.iq.dataverse.engine.command.impl.CuratePublishedDatasetVersionCommand;
import edu.harvard.iq.dataverse.engine.command.impl.DeaccessionDatasetVersionCommand;
@@ -37,13 +37,17 @@
import edu.harvard.iq.dataverse.engine.command.impl.PublishDataverseCommand;
import edu.harvard.iq.dataverse.engine.command.impl.UpdateDatasetVersionCommand;
import edu.harvard.iq.dataverse.export.ExportService;
+import edu.harvard.iq.dataverse.util.cache.CacheFactoryBean;
import io.gdcc.spi.export.ExportException;
import io.gdcc.spi.export.Exporter;
import edu.harvard.iq.dataverse.ingest.IngestRequest;
import edu.harvard.iq.dataverse.ingest.IngestServiceBean;
import edu.harvard.iq.dataverse.license.LicenseServiceBean;
import edu.harvard.iq.dataverse.metadataimport.ForeignMetadataImportServiceBean;
+import edu.harvard.iq.dataverse.pidproviders.PidProvider;
import edu.harvard.iq.dataverse.pidproviders.PidUtil;
+import edu.harvard.iq.dataverse.pidproviders.doi.AbstractDOIProvider;
+import edu.harvard.iq.dataverse.pidproviders.doi.datacite.DataCiteDOIProvider;
import edu.harvard.iq.dataverse.privateurl.PrivateUrl;
import edu.harvard.iq.dataverse.privateurl.PrivateUrlServiceBean;
import edu.harvard.iq.dataverse.privateurl.PrivateUrlUtil;
@@ -240,12 +244,16 @@ public enum DisplayMode {
SolrClientService solrClientService;
@EJB
DvObjectServiceBean dvObjectService;
+ @EJB
+ CacheFactoryBean cacheFactory;
@Inject
DataverseRequestServiceBean dvRequestService;
@Inject
DatasetVersionUI datasetVersionUI;
@Inject
PermissionsWrapper permissionsWrapper;
+ @Inject
+ NavigationWrapper navigationWrapper;
@Inject
FileDownloadHelper fileDownloadHelper;
@Inject
@@ -706,6 +714,16 @@ public void setNumberOfFilesToShow(Long numberOfFilesToShow) {
this.numberOfFilesToShow = numberOfFilesToShow;
}
+ private String returnReason = "";
+
+ public String getReturnReason() {
+ return returnReason;
+ }
+
+ public void setReturnReason(String returnReason) {
+ this.returnReason = returnReason;
+ }
+
public void showAll(){
setNumberOfFilesToShow(new Long(fileMetadatasSearch.size()));
}
@@ -772,11 +790,17 @@ public boolean isIndexedVersion() {
return isIndexedVersion = false;
}
// If this is the latest published version, we want to confirm that this
- // version was successfully indexed after the last publication
-
+ // version was successfully indexed after the last publication
if (isThisLatestReleasedVersion()) {
- return isIndexedVersion = (workingVersion.getDataset().getIndexTime() != null)
- && workingVersion.getDataset().getIndexTime().after(workingVersion.getReleaseTime());
+ if (workingVersion.getDataset().getIndexTime() == null) {
+ return isIndexedVersion = false;
+ }
+ // We add 3 hours to the indexed time to prevent false negatives
+ // when indexed time gets overwritten in finalizing the publication step
+ // by a value before the release time
+ final long duration = 3 * 60 * 60 * 1000;
+ final Timestamp movedIndexTime = new Timestamp(workingVersion.getDataset().getIndexTime().getTime() + duration);
+ return isIndexedVersion = movedIndexTime.after(workingVersion.getReleaseTime());
}
// Drafts don't have the indextime stamps set/incremented when indexed,
@@ -1918,15 +1942,15 @@ private void setIdByPersistentId() {
}
private String init(boolean initFull) {
-
+ // Check for rate limit exceeded. Must be done before anything else to prevent unnecessary processing.
+ if (!cacheFactory.checkRate(session.getUser(), new CheckRateLimitForDatasetPageCommand(null,null))) {
+ return navigationWrapper.tooManyRequests();
+ }
//System.out.println("_YE_OLDE_QUERY_COUNTER_"); // for debug purposes
setDataverseSiteUrl(systemConfig.getDataverseSiteUrl());
guestbookResponse = new GuestbookResponse();
- String nonNullDefaultIfKeyNotFound = "";
- protocol = settingsWrapper.getValueForKey(SettingsServiceBean.Key.Protocol, nonNullDefaultIfKeyNotFound);
- authority = settingsWrapper.getValueForKey(SettingsServiceBean.Key.Authority, nonNullDefaultIfKeyNotFound);
String sortOrder = getSortOrder();
if(sortOrder != null) {
FileMetadata.setCategorySortOrder(sortOrder);
@@ -2026,7 +2050,7 @@ private String init(boolean initFull) {
// to the local 404 page, below.
logger.warning("failed to issue a redirect to "+originalSourceURL);
}
- return originalSourceURL;
+ return null;
}
return permissionsWrapper.notFound();
@@ -2108,8 +2132,6 @@ private String init(boolean initFull) {
editMode = EditMode.CREATE;
selectedHostDataverse = dataverseService.find(ownerId);
dataset.setOwner(selectedHostDataverse);
- dataset.setProtocol(protocol);
- dataset.setAuthority(authority);
if (dataset.getOwner() == null) {
return permissionsWrapper.notFound();
@@ -2119,9 +2141,9 @@ private String init(boolean initFull) {
//Wait until the create command before actually getting an identifier, except if we're using directUpload
//Need to assign an identifier prior to calls to requestDirectUploadUrl if direct upload is used.
if ( isEmpty(dataset.getIdentifier()) && systemConfig.directUploadEnabled(dataset) ) {
- CommandContext ctxt = commandEngine.getContext();
- GlobalIdServiceBean idServiceBean = GlobalIdServiceBean.getBean(ctxt);
- dataset.setIdentifier(idServiceBean.generateDatasetIdentifier(dataset));
+ CommandContext ctxt = commandEngine.getContext();
+ PidProvider pidProvider = ctxt.dvObjects().getEffectivePidGenerator(dataset);
+ pidProvider.generatePid(dataset);
}
dataverseTemplates.addAll(dataverseService.find(ownerId).getTemplates());
if (!dataverseService.find(ownerId).isTemplateRoot()) {
@@ -2326,14 +2348,17 @@ private void displayLockInfo(Dataset dataset) {
lockedDueToIngestVar = true;
}
- // With DataCite, we try to reserve the DOI when the dataset is created. Sometimes this
- // fails because DataCite is down. We show the message below to set expectations that the
- // "Publish" button won't work until the DOI has been reserved using the "Reserve PID" API.
- if (settingsWrapper.isDataCiteInstallation() && dataset.getGlobalIdCreateTime() == null && editMode != EditMode.CREATE) {
- JH.addMessage(FacesMessage.SEVERITY_WARN, BundleUtil.getStringFromBundle("dataset.locked.pidNotReserved.message"),
- BundleUtil.getStringFromBundle("dataset.locked.pidNotReserved.message.details"));
+ if (dataset.getGlobalIdCreateTime() == null && editMode != EditMode.CREATE) {
+ // With DataCite, we try to reserve the DOI when the dataset is created. Sometimes this
+ // fails because DataCite is down. We show the message below to set expectations that the
+ // "Publish" button won't work until the DOI has been reserved using the "Reserve PID" API.
+ PidProvider pidProvider = PidUtil.getPidProvider(dataset.getGlobalId().getProviderId());
+ if (DataCiteDOIProvider.TYPE.equals(pidProvider.getProviderType())) {
+ JH.addMessage(FacesMessage.SEVERITY_WARN,
+ BundleUtil.getStringFromBundle("dataset.locked.pidNotReserved.message"),
+ BundleUtil.getStringFromBundle("dataset.locked.pidNotReserved.message.details"));
+ }
}
-
//if necessary refresh publish message also
displayPublishMessage();
@@ -2652,8 +2677,7 @@ public void edit(EditMode editMode) {
public String sendBackToContributor() {
try {
- //FIXME - Get Return Comment from sendBackToContributor popup
- Command cmd = new ReturnDatasetToAuthorCommand(dvRequestService.getDataverseRequest(), dataset, "");
+ Command cmd = new ReturnDatasetToAuthorCommand(dvRequestService.getDataverseRequest(), dataset, returnReason);
dataset = commandEngine.submit(cmd);
JsfHelper.addSuccessMessage(BundleUtil.getStringFromBundle("dataset.reject.success"));
} catch (CommandException ex) {
@@ -3362,7 +3386,7 @@ private boolean filterSelectedFiles(){
if(globusDownloadEnabled) {
String driverId = DataAccess.getStorageDriverFromIdentifier(fmd.getDataFile().getStorageIdentifier());
globusTransferable = GlobusAccessibleStore.isGlobusAccessible(driverId);
- downloadable = downloadable && !AbstractRemoteOverlayAccessIO.isNotDataverseAccessible(driverId);
+ downloadable = downloadable && StorageIO.isDataverseAccessible(driverId);
}
if(downloadable){
getSelectedDownloadableFiles().add(fmd);
@@ -5800,6 +5824,19 @@ public boolean isThisLatestReleasedVersion() {
}
+ public String getCroissant() {
+ if (isThisLatestReleasedVersion()) {
+ final String CROISSANT_SCHEMA_NAME = "croissant";
+ ExportService instance = ExportService.getInstance();
+ String croissant = instance.getExportAsString(dataset, CROISSANT_SCHEMA_NAME);
+ if (croissant != null && !croissant.isEmpty()) {
+ logger.fine("Returning cached CROISSANT.");
+ return croissant;
+ }
+ }
+ return null;
+ }
+
public String getJsonLd() {
if (isThisLatestReleasedVersion()) {
ExportService instance = ExportService.getInstance();
@@ -6395,5 +6432,9 @@ public String getSignpostingLinkHeader() {
}
return signpostingLinkHeader;
}
+
+ public boolean isDOI() {
+ return AbstractDOIProvider.DOI_PROTOCOL.equals(dataset.getGlobalId().getProtocol());
+ }
}
diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DatasetServiceBean.java
index c6df2a2e1ab..9c182164d37 100644
--- a/src/main/java/edu/harvard/iq/dataverse/DatasetServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/DatasetServiceBean.java
@@ -19,6 +19,8 @@
import edu.harvard.iq.dataverse.export.ExportService;
import edu.harvard.iq.dataverse.globus.GlobusServiceBean;
import edu.harvard.iq.dataverse.harvest.server.OAIRecordServiceBean;
+import edu.harvard.iq.dataverse.pidproviders.PidProvider;
+import edu.harvard.iq.dataverse.pidproviders.PidUtil;
import edu.harvard.iq.dataverse.search.IndexServiceBean;
import edu.harvard.iq.dataverse.settings.SettingsServiceBean;
import edu.harvard.iq.dataverse.util.BundleUtil;
@@ -61,9 +63,6 @@ public class DatasetServiceBean implements java.io.Serializable {
@EJB
IndexServiceBean indexService;
- @EJB
- DOIEZIdServiceBean doiEZIdServiceBean;
-
@EJB
SettingsServiceBean settingsService;
@@ -700,7 +699,7 @@ public void exportAllDatasets(boolean forceReExport) {
Integer countError = 0;
String logTimestamp = logFormatter.format(new Date());
Logger exportLogger = Logger.getLogger("edu.harvard.iq.dataverse.harvest.client.DatasetServiceBean." + "ExportAll" + logTimestamp);
- String logFileName = "../logs" + File.separator + "export_" + logTimestamp + ".log";
+ String logFileName = System.getProperty("com.sun.aas.instanceRoot") + File.separator + "logs" + File.separator + "export_" + logTimestamp + ".log";
FileHandler fileHandler;
boolean fileHandlerSuceeded;
try {
@@ -940,80 +939,6 @@ public void callFinalizePublishCommandAsynchronously(Long datasetId, CommandCont
}
}
- /*
- Experimental asynchronous method for requesting persistent identifiers for
- datafiles. We decided not to run this method on upload/create (so files
- will not have persistent ids while in draft; when the draft is published,
- we will force obtaining persistent ids for all the files in the version.
-
- If we go back to trying to register global ids on create, care will need to
- be taken to make sure the asynchronous changes below are not conflicting with
- the changes from file ingest (which may be happening in parallel, also
- asynchronously). We would also need to lock the dataset (similarly to how
- tabular ingest logs the dataset), to prevent the user from publishing the
- version before all the identifiers get assigned - otherwise more conflicts
- are likely. (It sounds like it would make sense to treat these two tasks -
- persistent identifiers for files and ingest - as one post-upload job, so that
- they can be run in sequence). -- L.A. Mar. 2018
- */
- @Asynchronous
- public void obtainPersistentIdentifiersForDatafiles(Dataset dataset) {
- GlobalIdServiceBean idServiceBean = GlobalIdServiceBean.getBean(dataset.getProtocol(), commandEngine.getContext());
-
- //If the Id type is sequential and Dependent then write file idenitifiers outside the command
- String datasetIdentifier = dataset.getIdentifier();
- Long maxIdentifier = null;
-
- if (systemConfig.isDataFilePIDSequentialDependent()) {
- maxIdentifier = getMaximumExistingDatafileIdentifier(dataset);
- }
-
- for (DataFile datafile : dataset.getFiles()) {
- logger.info("Obtaining persistent id for datafile id=" + datafile.getId());
-
- if (datafile.getIdentifier() == null || datafile.getIdentifier().isEmpty()) {
-
- logger.info("Obtaining persistent id for datafile id=" + datafile.getId());
-
- if (maxIdentifier != null) {
- maxIdentifier++;
- datafile.setIdentifier(datasetIdentifier + "/" + maxIdentifier.toString());
- } else {
- datafile.setIdentifier(idServiceBean.generateDataFileIdentifier(datafile));
- }
-
- if (datafile.getProtocol() == null) {
- datafile.setProtocol(settingsService.getValueForKey(SettingsServiceBean.Key.Protocol, ""));
- }
- if (datafile.getAuthority() == null) {
- datafile.setAuthority(settingsService.getValueForKey(SettingsServiceBean.Key.Authority, ""));
- }
-
- logger.info("identifier: " + datafile.getIdentifier());
-
- String doiRetString;
-
- try {
- logger.log(Level.FINE, "creating identifier");
- doiRetString = idServiceBean.createIdentifier(datafile);
- } catch (Throwable e) {
- logger.log(Level.WARNING, "Exception while creating Identifier: " + e.getMessage(), e);
- doiRetString = "";
- }
-
- // Check return value to make sure registration succeeded
- if (!idServiceBean.registerWhenPublished() && doiRetString.contains(datafile.getIdentifier())) {
- datafile.setIdentifierRegistered(true);
- datafile.setGlobalIdCreateTime(new Date());
- }
-
- DataFile merged = em.merge(datafile);
- merged = null;
- }
-
- }
- }
-
public long findStorageSize(Dataset dataset) throws IOException {
return findStorageSize(dataset, false, GetDatasetStorageSizeCommand.Mode.STORAGE, null);
}
diff --git a/src/main/java/edu/harvard/iq/dataverse/DataversePage.java b/src/main/java/edu/harvard/iq/dataverse/DataversePage.java
index 943a74327d5..351d304bad3 100644
--- a/src/main/java/edu/harvard/iq/dataverse/DataversePage.java
+++ b/src/main/java/edu/harvard/iq/dataverse/DataversePage.java
@@ -9,12 +9,16 @@
import edu.harvard.iq.dataverse.dataverse.DataverseUtil;
import edu.harvard.iq.dataverse.engine.command.Command;
import edu.harvard.iq.dataverse.engine.command.exception.CommandException;
+import edu.harvard.iq.dataverse.engine.command.impl.CheckRateLimitForCollectionPageCommand;
import edu.harvard.iq.dataverse.engine.command.impl.CreateDataverseCommand;
import edu.harvard.iq.dataverse.engine.command.impl.CreateSavedSearchCommand;
import edu.harvard.iq.dataverse.engine.command.impl.DeleteDataverseCommand;
import edu.harvard.iq.dataverse.engine.command.impl.LinkDataverseCommand;
import edu.harvard.iq.dataverse.engine.command.impl.PublishDataverseCommand;
import edu.harvard.iq.dataverse.engine.command.impl.UpdateDataverseCommand;
+import edu.harvard.iq.dataverse.pidproviders.PidProvider;
+import edu.harvard.iq.dataverse.pidproviders.PidProviderFactoryBean;
+import edu.harvard.iq.dataverse.pidproviders.PidUtil;
import edu.harvard.iq.dataverse.search.FacetCategory;
import edu.harvard.iq.dataverse.search.IndexServiceBean;
import edu.harvard.iq.dataverse.search.SearchFields;
@@ -28,15 +32,20 @@
import static edu.harvard.iq.dataverse.util.JsfHelper.JH;
import edu.harvard.iq.dataverse.util.SystemConfig;
import java.util.List;
+
+import edu.harvard.iq.dataverse.util.cache.CacheFactoryBean;
import jakarta.ejb.EJB;
import jakarta.faces.application.FacesMessage;
import jakarta.faces.context.FacesContext;
import jakarta.faces.view.ViewScoped;
import jakarta.inject.Inject;
import jakarta.inject.Named;
+
+import java.util.AbstractMap;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
+import java.util.HashSet;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
@@ -109,7 +118,13 @@ public enum LinkMode {
@EJB
DataverseLinkingServiceBean linkingService;
@Inject PermissionsWrapper permissionsWrapper;
- @Inject DataverseHeaderFragment dataverseHeaderFragment;
+ @Inject
+ NavigationWrapper navigationWrapper;
+ @Inject DataverseHeaderFragment dataverseHeaderFragment;
+ @EJB
+ PidProviderFactoryBean pidProviderFactoryBean;
+ @EJB
+ CacheFactoryBean cacheFactory;
private Dataverse dataverse = new Dataverse();
@@ -310,7 +325,10 @@ public void updateOwnerDataverse() {
public String init() {
//System.out.println("_YE_OLDE_QUERY_COUNTER_"); // for debug purposes
-
+ // Check for rate limit exceeded. Must be done before anything else to prevent unnecessary processing.
+ if (!cacheFactory.checkRate(session.getUser(), new CheckRateLimitForCollectionPageCommand(null,null))) {
+ return navigationWrapper.tooManyRequests();
+ }
if (this.getAlias() != null || this.getId() != null || this.getOwnerId() == null) {// view mode for a dataverse
if (this.getAlias() != null) {
dataverse = dataverseService.findByAlias(this.getAlias());
@@ -362,7 +380,7 @@ public void initFeaturedDataverses() {
List featuredSource = new ArrayList<>();
List featuredTarget = new ArrayList<>();
featuredSource.addAll(dataverseService.findAllPublishedByOwnerId(dataverse.getId()));
- featuredSource.addAll(linkingService.findLinkingDataverses(dataverse.getId()));
+ featuredSource.addAll(linkingService.findLinkedDataverses(dataverse.getId()));
List featuredList = featuredDataverseService.findByDataverseId(dataverse.getId());
for (DataverseFeaturedDataverse dfd : featuredList) {
Dataverse fd = dfd.getFeaturedDataverse();
@@ -1289,4 +1307,34 @@ public String getCurationLabelSetNameLabel() {
public Set> getGuestbookEntryOptions() {
return settingsWrapper.getGuestbookEntryOptions(this.dataverse).entrySet();
}
+
+ public Set> getPidProviderOptions() {
+ PidProvider defaultPidProvider = pidProviderFactoryBean.getDefaultPidGenerator();
+ Set providerIds = PidUtil.getManagedProviderIds();
+ Set> options = new HashSet>();
+ if (providerIds.size() > 1) {
+
+ String label = null;
+ if (this.dataverse.getOwner() != null && this.dataverse.getOwner().getEffectivePidGenerator()!= null) {
+ PidProvider inheritedPidProvider = this.dataverse.getOwner().getEffectivePidGenerator();
+ label = inheritedPidProvider.getLabel() + " " + BundleUtil.getStringFromBundle("dataverse.inherited") + ": "
+ + inheritedPidProvider.getProtocol() + ":" + inheritedPidProvider.getAuthority()
+ + inheritedPidProvider.getSeparator() + inheritedPidProvider.getShoulder();
+ } else {
+ label = defaultPidProvider.getLabel() + " " + BundleUtil.getStringFromBundle("dataverse.default") + ": "
+ + defaultPidProvider.getProtocol() + ":" + defaultPidProvider.getAuthority()
+ + defaultPidProvider.getSeparator() + defaultPidProvider.getShoulder();
+ }
+ Entry option = new AbstractMap.SimpleEntry("default", label);
+ options.add(option);
+ }
+ for (String providerId : providerIds) {
+ PidProvider pidProvider = PidUtil.getPidProvider(providerId);
+ String label = pidProvider.getLabel() + ": " + pidProvider.getProtocol() + ":" + pidProvider.getAuthority()
+ + pidProvider.getSeparator() + pidProvider.getShoulder();
+ Entry option = new AbstractMap.SimpleEntry(providerId, label);
+ options.add(option);
+ }
+ return options;
+ }
}
diff --git a/src/main/java/edu/harvard/iq/dataverse/DvObjectContainer.java b/src/main/java/edu/harvard/iq/dataverse/DvObjectContainer.java
index 82057315fbb..56d26a7260d 100644
--- a/src/main/java/edu/harvard/iq/dataverse/DvObjectContainer.java
+++ b/src/main/java/edu/harvard/iq/dataverse/DvObjectContainer.java
@@ -1,14 +1,20 @@
package edu.harvard.iq.dataverse;
import edu.harvard.iq.dataverse.dataaccess.DataAccess;
+import edu.harvard.iq.dataverse.pidproviders.PidProvider;
+import edu.harvard.iq.dataverse.pidproviders.PidUtil;
import edu.harvard.iq.dataverse.settings.JvmSettings;
import edu.harvard.iq.dataverse.storageuse.StorageUse;
import edu.harvard.iq.dataverse.util.SystemConfig;
+import edu.harvard.iq.dataverse.util.json.JsonUtil;
+import jakarta.json.JsonObject;
+import jakarta.json.JsonObjectBuilder;
import jakarta.persistence.CascadeType;
import java.util.Optional;
-
import jakarta.persistence.MappedSuperclass;
import jakarta.persistence.OneToOne;
+import jakarta.persistence.Transient;
+
import org.apache.commons.lang3.StringUtils;
/**
@@ -42,6 +48,11 @@ public boolean isEffectivelyPermissionRoot() {
private String metadataLanguage=null;
private Boolean guestbookAtRequest = null;
+
+ private String pidGeneratorSpecs = null;
+
+ @Transient
+ private PidProvider pidGenerator = null;
@OneToOne(mappedBy = "dvObjectContainer",cascade={ CascadeType.REMOVE, CascadeType.PERSIST}, orphanRemoval=true)
private StorageUse storageUse;
@@ -175,4 +186,79 @@ public void setCurationLabelSetName(String setName) {
public void setStorageUse(StorageUse storageUse) {
this.storageUse = storageUse;
}
+
+
+ /* Dataverse collections and datasets can be configured to use different PidProviders as PID generators for contained objects (datasets or data files).
+ * This mechanism is similar to others except that the stored value is a JSON object defining the protocol, authority, shoulder, and, optionally, the separator for the PidProvider.
+ */
+
+ public String getPidGeneratorSpecs() {
+ return pidGeneratorSpecs;
+ }
+
+ public void setPidGeneratorSpecs(String pidGeneratorSpecs) {
+ this.pidGeneratorSpecs = pidGeneratorSpecs;
+ }
+
+ // Used in JSF when selecting the PidGenerator
+ // It only returns an id if this dvObjectContainer has PidGenerator specs set on it, otherwise it returns "default"
+ public String getPidGeneratorId() {
+ if (StringUtils.isBlank(getPidGeneratorSpecs())) {
+ return "default";
+ } else {
+ return getEffectivePidGenerator().getId();
+ }
+ }
+
+ //Used in JSF when setting the PidGenerator
+ public void setPidGeneratorId(String pidGeneratorId) {
+ // Note that the "default" provider will not be found so will result in
+ // setPidGenerator(null), which unsets the pidGenerator/Specs as desired
+ setPidGenerator(PidUtil.getPidProvider(pidGeneratorId));
+ }
+
+ public void setPidGenerator(PidProvider pidGenerator) {
+ this.pidGenerator = pidGenerator;
+ if (pidGenerator != null) {
+ JsonObjectBuilder job = jakarta.json.Json.createObjectBuilder();
+ this.pidGeneratorSpecs = job.add("protocol", pidGenerator.getProtocol())
+ .add("authority", pidGenerator.getAuthority()).add("shoulder", pidGenerator.getShoulder())
+ .add("separator", pidGenerator.getSeparator()).build().toString();
+ } else {
+ this.pidGeneratorSpecs = null;
+ }
+ }
+
+ public PidProvider getEffectivePidGenerator() {
+ if (pidGenerator == null) {
+ String specs = getPidGeneratorSpecs();
+ if (StringUtils.isBlank(specs)) {
+ GlobalId pid = getGlobalId();
+ if ((pid != null) && PidUtil.getPidProvider(pid.getProviderId()).canCreatePidsLike(pid)) {
+ pidGenerator = PidUtil.getPidProvider(pid.getProviderId());
+ } else {
+ if (getOwner() != null) {
+ pidGenerator = getOwner().getEffectivePidGenerator();
+ }
+ }
+ } else {
+ JsonObject providerSpecs = JsonUtil.getJsonObject(specs);
+ if (providerSpecs.containsKey("separator")) {
+ pidGenerator = PidUtil.getPidProvider(providerSpecs.getString("protocol"),
+ providerSpecs.getString("authority"), providerSpecs.getString("shoulder"),
+ providerSpecs.getString("separator"));
+ } else {
+ pidGenerator = PidUtil.getPidProvider(providerSpecs.getString("protocol"),
+ providerSpecs.getString("authority"), providerSpecs.getString("shoulder"));
+ }
+ }
+ if(pidGenerator!=null && pidGenerator.canManagePID()) {
+ setPidGenerator(pidGenerator);
+ } else {
+ setPidGenerator(null);
+ }
+ }
+ return pidGenerator;
+ }
+
}
diff --git a/src/main/java/edu/harvard/iq/dataverse/DvObjectServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DvObjectServiceBean.java
index d4219c36149..bd7fbeaff10 100644
--- a/src/main/java/edu/harvard/iq/dataverse/DvObjectServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/DvObjectServiceBean.java
@@ -1,8 +1,9 @@
package edu.harvard.iq.dataverse;
import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser;
+import edu.harvard.iq.dataverse.pidproviders.PidProvider;
+import edu.harvard.iq.dataverse.pidproviders.PidProviderFactoryBean;
import edu.harvard.iq.dataverse.pidproviders.PidUtil;
-
import java.sql.Timestamp;
import java.util.ArrayList;
import java.util.Date;
@@ -12,6 +13,8 @@
import java.util.Set;
import java.util.logging.Level;
import java.util.logging.Logger;
+
+import jakarta.ejb.EJB;
import jakarta.ejb.Stateless;
import jakarta.ejb.TransactionAttribute;
import static jakarta.ejb.TransactionAttributeType.REQUIRES_NEW;
@@ -38,6 +41,9 @@ public class DvObjectServiceBean implements java.io.Serializable {
@PersistenceContext(unitName = "VDCNet-ejbPU")
private EntityManager em;
+ @EJB
+ PidProviderFactoryBean pidProviderFactoryBean;
+
private static final Logger logger = Logger.getLogger(DvObjectServiceBean.class.getCanonicalName());
/**
* @param dvoc The object we check
@@ -389,4 +395,19 @@ public String generateNewIdentifierByStoredProcedure() {
return (String) query.getOutputParameterValue(1);
}
+ /** @deprecated Backward-compatibility method to get the effective pid generator for a DvObjectContainer.
+ * If the dvObjectContainer method returns null, this method will check for the old global default settings.
+ * If/when those are no longer supported, this method can be removed and replaced with calls directly
+ * to dvObjectContainer.getEffectivePidGenerator();
+ *
+ */
+ @Deprecated(forRemoval = true, since = "2024-02-09")
+ public PidProvider getEffectivePidGenerator(DvObjectContainer dvObjectContainer) {
+ PidProvider pidGenerator = dvObjectContainer.getEffectivePidGenerator();
+ if (pidGenerator == null) {
+ pidGenerator = pidProviderFactoryBean.getDefaultPidGenerator();
+ }
+ return pidGenerator;
+ }
+
}
diff --git a/src/main/java/edu/harvard/iq/dataverse/EditDatafilesPage.java b/src/main/java/edu/harvard/iq/dataverse/EditDatafilesPage.java
index a6f31e24764..993cb02b66b 100644
--- a/src/main/java/edu/harvard/iq/dataverse/EditDatafilesPage.java
+++ b/src/main/java/edu/harvard/iq/dataverse/EditDatafilesPage.java
@@ -1237,9 +1237,6 @@ public String save() {
- We decided not to bother obtaining persistent ids for new files
as they are uploaded and created. The identifiers will be assigned
later, when the version is published.
-
- logger.info("starting async job for obtaining persistent ids for files.");
- datasetService.obtainPersistentIdentifiersForDatafiles(dataset);
*/
}
diff --git a/src/main/java/edu/harvard/iq/dataverse/EjbDataverseEngine.java b/src/main/java/edu/harvard/iq/dataverse/EjbDataverseEngine.java
index 5a689c06019..bb3fa475847 100644
--- a/src/main/java/edu/harvard/iq/dataverse/EjbDataverseEngine.java
+++ b/src/main/java/edu/harvard/iq/dataverse/EjbDataverseEngine.java
@@ -4,6 +4,7 @@
import edu.harvard.iq.dataverse.actionlogging.ActionLogServiceBean;
import edu.harvard.iq.dataverse.authorization.AuthenticationServiceBean;
import edu.harvard.iq.dataverse.authorization.providers.builtin.BuiltinUserServiceBean;
+import edu.harvard.iq.dataverse.util.cache.CacheFactoryBean;
import edu.harvard.iq.dataverse.engine.DataverseEngine;
import edu.harvard.iq.dataverse.authorization.Permission;
import edu.harvard.iq.dataverse.authorization.groups.GroupServiceBean;
@@ -16,9 +17,9 @@
import edu.harvard.iq.dataverse.engine.command.DataverseRequest;
import edu.harvard.iq.dataverse.engine.command.exception.CommandException;
import edu.harvard.iq.dataverse.engine.command.exception.PermissionException;
+import edu.harvard.iq.dataverse.engine.command.exception.RateLimitCommandException;
import edu.harvard.iq.dataverse.ingest.IngestServiceBean;
-import edu.harvard.iq.dataverse.pidproviders.FakePidProviderServiceBean;
-import edu.harvard.iq.dataverse.pidproviders.PermaLinkPidProviderServiceBean;
+import edu.harvard.iq.dataverse.pidproviders.PidProviderFactoryBean;
import edu.harvard.iq.dataverse.privateurl.PrivateUrlServiceBean;
import edu.harvard.iq.dataverse.search.IndexBatchServiceBean;
import edu.harvard.iq.dataverse.search.IndexServiceBean;
@@ -49,7 +50,6 @@
import static jakarta.ejb.TransactionAttributeType.SUPPORTS;
import jakarta.persistence.EntityManager;
import jakarta.persistence.PersistenceContext;
-import jakarta.validation.ConstraintViolation;
import jakarta.validation.ConstraintViolationException;
/**
@@ -114,20 +114,8 @@ public class EjbDataverseEngine {
DataverseFieldTypeInputLevelServiceBean fieldTypeInputLevels;
@EJB
- DOIEZIdServiceBean doiEZId;
-
- @EJB
- DOIDataCiteServiceBean doiDataCite;
-
- @EJB
- FakePidProviderServiceBean fakePidProvider;
+ PidProviderFactoryBean pidProviderFactory;
- @EJB
- HandlenetServiceBean handleNet;
-
- @EJB
- PermaLinkPidProviderServiceBean permaLinkProvider;
-
@EJB
SettingsServiceBean settings;
@@ -190,7 +178,9 @@ public class EjbDataverseEngine {
@EJB
EjbDataverseEngineInner innerEngine;
-
+
+ @EJB
+ CacheFactoryBean cacheFactory;
@Resource
EJBContext ejbCtxt;
@@ -216,7 +206,11 @@ public R submit(Command aCommand) throws CommandException {
try {
logRec.setUserIdentifier( aCommand.getRequest().getUser().getIdentifier() );
-
+ // Check for rate limit exceeded. Must be done before anything else to prevent unnecessary processing.
+ if (!cacheFactory.checkRate(aCommand.getRequest().getUser(), aCommand)) {
+ throw new RateLimitCommandException(BundleUtil.getStringFromBundle("command.exception.user.ratelimited", Arrays.asList(aCommand.getClass().getSimpleName())), aCommand);
+ }
+
// Check permissions - or throw an exception
Map> requiredMap = aCommand.getRequiredPermissions();
if (requiredMap == null) {
@@ -484,28 +478,8 @@ public DataverseFieldTypeInputLevelServiceBean fieldTypeInputLevels() {
}
@Override
- public DOIEZIdServiceBean doiEZId() {
- return doiEZId;
- }
-
- @Override
- public DOIDataCiteServiceBean doiDataCite() {
- return doiDataCite;
- }
-
- @Override
- public FakePidProviderServiceBean fakePidProvider() {
- return fakePidProvider;
- }
-
- @Override
- public HandlenetServiceBean handleNet() {
- return handleNet;
- }
-
- @Override
- public PermaLinkPidProviderServiceBean permaLinkProvider() {
- return permaLinkProvider;
+ public PidProviderFactoryBean pidProviderFactory() {
+ return pidProviderFactory;
}
@Override
diff --git a/src/main/java/edu/harvard/iq/dataverse/FileDownloadHelper.java b/src/main/java/edu/harvard/iq/dataverse/FileDownloadHelper.java
index 4d8100124ec..33e708e7467 100644
--- a/src/main/java/edu/harvard/iq/dataverse/FileDownloadHelper.java
+++ b/src/main/java/edu/harvard/iq/dataverse/FileDownloadHelper.java
@@ -8,6 +8,8 @@
import edu.harvard.iq.dataverse.authorization.Permission;
import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser;
import edu.harvard.iq.dataverse.authorization.users.PrivateUrlUser;
+import edu.harvard.iq.dataverse.dataaccess.DataAccess;
+import edu.harvard.iq.dataverse.dataaccess.StorageIO;
import edu.harvard.iq.dataverse.externaltools.ExternalTool;
import edu.harvard.iq.dataverse.globus.GlobusServiceBean;
import edu.harvard.iq.dataverse.util.BundleUtil;
diff --git a/src/main/java/edu/harvard/iq/dataverse/FileDownloadServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/FileDownloadServiceBean.java
index ca3f5b4bded..de3f4d2ab56 100644
--- a/src/main/java/edu/harvard/iq/dataverse/FileDownloadServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/FileDownloadServiceBean.java
@@ -316,7 +316,7 @@ private void redirectToDownloadAPI(String downloadType, Long fileId, boolean gue
Long fileMetadataId) {
String fileDownloadUrl = FileUtil.getFileDownloadUrlPath(downloadType, fileId, guestBookRecordAlreadyWritten,
fileMetadataId);
- if (downloadType.equals("GlobusTransfer")) {
+ if ("GlobusTransfer".equals(downloadType)) {
PrimeFaces.current().executeScript(URLTokenUtil.getScriptForUrl(fileDownloadUrl));
} else {
logger.fine("Redirecting to file download url: " + fileDownloadUrl);
diff --git a/src/main/java/edu/harvard/iq/dataverse/FilePage.java b/src/main/java/edu/harvard/iq/dataverse/FilePage.java
index 479c8a429c6..ca225dccb1c 100644
--- a/src/main/java/edu/harvard/iq/dataverse/FilePage.java
+++ b/src/main/java/edu/harvard/iq/dataverse/FilePage.java
@@ -21,6 +21,7 @@
import edu.harvard.iq.dataverse.engine.command.impl.CreateNewDatasetCommand;
import edu.harvard.iq.dataverse.engine.command.impl.PersistProvFreeFormCommand;
import edu.harvard.iq.dataverse.engine.command.impl.RestrictFileCommand;
+import edu.harvard.iq.dataverse.engine.command.impl.UningestFileCommand;
import edu.harvard.iq.dataverse.engine.command.impl.UpdateDatasetVersionCommand;
import edu.harvard.iq.dataverse.export.ExportService;
import io.gdcc.spi.export.ExportException;
@@ -28,6 +29,8 @@
import edu.harvard.iq.dataverse.externaltools.ExternalTool;
import edu.harvard.iq.dataverse.externaltools.ExternalToolHandler;
import edu.harvard.iq.dataverse.externaltools.ExternalToolServiceBean;
+import edu.harvard.iq.dataverse.ingest.IngestRequest;
+import edu.harvard.iq.dataverse.ingest.IngestServiceBean;
import edu.harvard.iq.dataverse.makedatacount.MakeDataCountLoggingServiceBean;
import edu.harvard.iq.dataverse.makedatacount.MakeDataCountLoggingServiceBean.MakeDataCountEntry;
import edu.harvard.iq.dataverse.privateurl.PrivateUrlServiceBean;
@@ -35,6 +38,8 @@
import edu.harvard.iq.dataverse.util.BundleUtil;
import edu.harvard.iq.dataverse.util.FileUtil;
import edu.harvard.iq.dataverse.util.JsfHelper;
+import edu.harvard.iq.dataverse.util.StringUtil;
+
import static edu.harvard.iq.dataverse.util.JsfHelper.JH;
import edu.harvard.iq.dataverse.util.SystemConfig;
@@ -45,7 +50,10 @@
import java.util.Comparator;
import java.util.List;
import java.util.Set;
+import java.util.logging.Level;
import java.util.logging.Logger;
+import java.util.stream.Collectors;
+
import jakarta.ejb.EJB;
import jakarta.ejb.EJBException;
import jakarta.faces.application.FacesMessage;
@@ -112,10 +120,10 @@ public class FilePage implements java.io.Serializable {
GuestbookResponseServiceBean guestbookResponseService;
@EJB
AuthenticationServiceBean authService;
-
@EJB
DatasetServiceBean datasetService;
-
+ @EJB
+ IngestServiceBean ingestService;
@EJB
SystemConfig systemConfig;
@@ -209,7 +217,7 @@ public String init() {
// If this DatasetVersion is unpublished and permission is doesn't have permissions:
// > Go to the Login page
//
- // Check permisisons
+ // Check permissions
Boolean authorized = (fileMetadata.getDatasetVersion().isReleased())
|| (!fileMetadata.getDatasetVersion().isReleased() && this.canViewUnpublishedDataset());
@@ -238,12 +246,10 @@ public String init() {
if (file.isTabularData()) {
contentType=DataFileServiceBean.MIME_TYPE_TSV_ALT;
}
- configureTools = externalToolService.findFileToolsByTypeAndContentType(ExternalTool.Type.CONFIGURE, contentType);
- exploreTools = externalToolService.findFileToolsByTypeAndContentType(ExternalTool.Type.EXPLORE, contentType);
- queryTools = externalToolService.findFileToolsByTypeAndContentType(ExternalTool.Type.QUERY, contentType);
- Collections.sort(exploreTools, CompareExternalToolName);
- toolsWithPreviews = sortExternalTools();
-
+ loadExternalTools();
+
+
+
if (toolType != null) {
if (toolType.equals("PREVIEW")) {
if (!toolsWithPreviews.isEmpty()) {
@@ -276,6 +282,22 @@ public String init() {
return null;
}
+ private void loadExternalTools() {
+ String contentType= file.getContentType();
+ configureTools = externalToolService.findFileToolsByTypeAndContentType(ExternalTool.Type.CONFIGURE, contentType);
+ exploreTools = externalToolService.findFileToolsByTypeAndContentType(ExternalTool.Type.EXPLORE, contentType);
+ queryTools = externalToolService.findFileToolsByTypeAndContentType(ExternalTool.Type.QUERY, contentType);
+ Collections.sort(exploreTools, CompareExternalToolName);
+ toolsWithPreviews = sortExternalTools();
+ //For inaccessible files, only show the tools that have access to aux files (which are currently always accessible)
+ if(!StorageIO.isDataverseAccessible(DataAccess.getStorageDriverFromIdentifier(file.getStorageIdentifier()))) {
+ configureTools = configureTools.stream().filter(tool ->tool.accessesAuxFiles()).collect(Collectors.toList());
+ exploreTools = exploreTools.stream().filter(tool ->tool.accessesAuxFiles()).collect(Collectors.toList());
+ queryTools = queryTools.stream().filter(tool ->tool.accessesAuxFiles()).collect(Collectors.toList());
+ toolsWithPreviews = toolsWithPreviews.stream().filter(tool ->tool.accessesAuxFiles()).collect(Collectors.toList());
+ }
+ }
+
private void displayPublishMessage(){
if (fileMetadata.getDatasetVersion().isDraft() && canUpdateDataset()
&& (canPublishDataset() || !fileMetadata.getDatasetVersion().getDataset().isLockedFor(DatasetLock.Reason.InReview))){
@@ -475,6 +497,120 @@ public String restrictFile(boolean restricted) throws CommandException{
return returnToDraftVersion();
}
+ public String ingestFile() throws CommandException{
+
+ User u = session.getUser();
+ if(!u.isAuthenticated() || !u.isSuperuser()) {
+ //Shouldn't happen (choice not displayed for users who don't have the right permission), but check anyway
+ logger.warning("User: " + u.getIdentifier() + " tried to ingest a file");
+ JH.addMessage(FacesMessage.SEVERITY_WARN, BundleUtil.getStringFromBundle("file.ingest.cantIngestFileWarning"));
+ return null;
+ }
+
+ DataFile dataFile = fileMetadata.getDataFile();
+ editDataset = dataFile.getOwner();
+
+ if (dataFile.isTabularData()) {
+ JH.addMessage(FacesMessage.SEVERITY_WARN, BundleUtil.getStringFromBundle("file.ingest.alreadyIngestedWarning"));
+ return null;
+ }
+
+ boolean ingestLock = dataset.isLockedFor(DatasetLock.Reason.Ingest);
+
+ if (ingestLock) {
+ JH.addMessage(FacesMessage.SEVERITY_WARN, BundleUtil.getStringFromBundle("file.ingest.ingestInProgressWarning"));
+ return null;
+ }
+
+ if (!FileUtil.canIngestAsTabular(dataFile)) {
+ JH.addMessage(FacesMessage.SEVERITY_WARN, BundleUtil.getStringFromBundle("file.ingest.cantIngestFileWarning"));
+ return null;
+
+ }
+
+ dataFile.SetIngestScheduled();
+
+ if (dataFile.getIngestRequest() == null) {
+ dataFile.setIngestRequest(new IngestRequest(dataFile));
+ }
+
+ dataFile.getIngestRequest().setForceTypeCheck(true);
+
+ // update the datafile, to save the newIngest request in the database:
+ datafileService.save(file);
+
+ // queue the data ingest job for asynchronous execution:
+ String status = ingestService.startIngestJobs(editDataset.getId(), new ArrayList<>(Arrays.asList(dataFile)), (AuthenticatedUser) session.getUser());
+
+ if (!StringUtil.isEmpty(status)) {
+ // This most likely indicates some sort of a problem (for example,
+ // the ingest job was not put on the JMS queue because of the size
+ // of the file). But we are still returning the OK status - because
+ // from the point of view of the API, it's a success - we have
+ // successfully gone through the process of trying to schedule the
+ // ingest job...
+
+ logger.warning("Ingest Status for file: " + dataFile.getId() + " : " + status);
+ }
+ logger.fine("File: " + dataFile.getId() + " ingest queued");
+
+ init();
+ JsfHelper.addInfoMessage(BundleUtil.getStringFromBundle("file.ingest.ingestQueued"));
+ return returnToDraftVersion();
+ }
+
+ public String uningestFile() throws CommandException {
+
+ if (!file.isTabularData()) {
+ //Ingest never succeeded, either there was a failure or this is not a tabular data file
+ if (file.isIngestProblem()) {
+ //We allow anyone who can publish to uningest in order to clear a problem
+ User u = session.getUser();
+ if (!u.isAuthenticated() || !(permissionService.permissionsFor(u, file).contains(Permission.PublishDataset))) {
+ logger.warning("User: " + u.getIdentifier() + " tried to uningest a file");
+ // Shouldn't happen (choice not displayed for users who don't have the right
+ // permission), but check anyway
+ JH.addMessage(FacesMessage.SEVERITY_WARN,
+ BundleUtil.getStringFromBundle("file.ingest.cantUningestFileWarning"));
+ return null;
+ }
+ file.setIngestDone();
+ file.setIngestReport(null);
+ } else {
+ //Shouldn't happen - got called when there is no tabular data or an ingest problem
+ JH.addMessage(FacesMessage.SEVERITY_WARN,
+ BundleUtil.getStringFromBundle("file.ingest.cantUningestFileWarning"));
+ return null;
+ }
+ } else {
+ //Superuser required to uningest after a success
+ //Uningest command does its own check for isSuperuser
+ commandEngine.submit(new UningestFileCommand(dvRequestService.getDataverseRequest(), file));
+ Long dataFileId = file.getId();
+ file = datafileService.find(dataFileId);
+ }
+ editDataset = file.getOwner();
+ if (editDataset.isReleased()) {
+ try {
+ ExportService instance = ExportService.getInstance();
+ instance.exportAllFormats(editDataset);
+
+ } catch (ExportException ex) {
+ // Something went wrong!
+ // Just like with indexing, a failure to export is not a fatal
+ // condition. We'll just log the error as a warning and keep
+ // going:
+ logger.log(Level.WARNING, "Uningest: Exception while exporting:{0}", ex.getMessage());
+ }
+ }
+ datafileService.save(file);
+
+ // Refresh filemetadata with file title, etc.
+ init();
+ JH.addMessage(FacesMessage.SEVERITY_INFO, BundleUtil.getStringFromBundle("file.uningest.complete"));
+ return returnToDraftVersion();
+ }
+
private List filesToBeDeleted = new ArrayList<>();
public String deleteFile() {
@@ -948,6 +1084,12 @@ public boolean isPubliclyDownloadable() {
return FileUtil.isPubliclyDownloadable(fileMetadata);
}
+ public boolean isIngestable() {
+ DataFile f = fileMetadata.getDataFile();
+ //Datafile is an ingestable type and hasn't been ingested yet or had an ingest fail
+ return (FileUtil.canIngestAsTabular(f)&&!(f.isTabularData() || f.isIngestProblem()));
+ }
+
private Boolean lockedFromEditsVar;
private Boolean lockedFromDownloadVar;
diff --git a/src/main/java/edu/harvard/iq/dataverse/GlobalId.java b/src/main/java/edu/harvard/iq/dataverse/GlobalId.java
index 890b146a61c..a542cb52ac0 100644
--- a/src/main/java/edu/harvard/iq/dataverse/GlobalId.java
+++ b/src/main/java/edu/harvard/iq/dataverse/GlobalId.java
@@ -6,7 +6,7 @@
package edu.harvard.iq.dataverse;
-import edu.harvard.iq.dataverse.pidproviders.PermaLinkPidProviderServiceBean;
+import edu.harvard.iq.dataverse.pidproviders.perma.PermaLinkPidProvider;
import edu.harvard.iq.dataverse.util.BundleUtil;
import static edu.harvard.iq.dataverse.util.StringUtil.isEmpty;
import java.net.MalformedURLException;
@@ -33,7 +33,7 @@ public GlobalId(String protocol, String authority, String identifier, String sep
this.separator = separator;
}
this.urlPrefix = urlPrefix;
- this.managingProviderName = providerName;
+ this.managingProviderId = providerName;
}
// protocol the identifier system, e.g. "doi"
@@ -42,7 +42,7 @@ public GlobalId(String protocol, String authority, String identifier, String sep
private String protocol;
private String authority;
private String identifier;
- private String managingProviderName;
+ private String managingProviderId;
private String separator = "/";
private String urlPrefix;
@@ -67,8 +67,8 @@ public String getIdentifier() {
return identifier;
}
- public String getProvider() {
- return managingProviderName;
+ public String getProviderId() {
+ return managingProviderId;
}
public String toString() {
diff --git a/src/main/java/edu/harvard/iq/dataverse/GuestbookResponseServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/GuestbookResponseServiceBean.java
index b0cc41eb448..6c043b78941 100644
--- a/src/main/java/edu/harvard/iq/dataverse/GuestbookResponseServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/GuestbookResponseServiceBean.java
@@ -432,7 +432,7 @@ public Long findCountByGuestbookId(Long guestbookId, Long dataverseId) {
Query query = em.createNativeQuery(queryString);
return (Long) query.getSingleResult();
} else {
- String queryString = "select count(o) from GuestbookResponse as o, Dataset d, DvObject obj where o.dataset_id = d.id and d.id = obj.id and obj.owner_id = " + dataverseId + "and o.guestbook_id = " + guestbookId;
+ String queryString = "select count(o) from GuestbookResponse as o, Dataset d, DvObject obj where o.dataset_id = d.id and d.id = obj.id and obj.owner_id = " + dataverseId + " and o.guestbook_id = " + guestbookId;
Query query = em.createNativeQuery(queryString);
return (Long) query.getSingleResult();
}
@@ -914,7 +914,7 @@ public void save(GuestbookResponse guestbookResponse) {
public Long getDownloadCountByDataFileId(Long dataFileId) {
// datafile id is null, will return 0
- Query query = em.createNativeQuery("select count(o.id) from GuestbookResponse o where o.datafile_id = " + dataFileId + "and eventtype != '" + GuestbookResponse.ACCESS_REQUEST +"'");
+ Query query = em.createNativeQuery("select count(o.id) from GuestbookResponse o where o.datafile_id = " + dataFileId + " and eventtype != '" + GuestbookResponse.ACCESS_REQUEST +"'");
return (Long) query.getSingleResult();
}
@@ -928,7 +928,7 @@ public Long getDownloadCountByDatasetId(Long datasetId, LocalDate date) {
if(date != null) {
query = em.createNativeQuery("select count(o.id) from GuestbookResponse o where o.dataset_id = " + datasetId + " and responsetime < '" + date.toString() + "' and eventtype != '" + GuestbookResponse.ACCESS_REQUEST +"'");
}else {
- query = em.createNativeQuery("select count(o.id) from GuestbookResponse o where o.dataset_id = " + datasetId+ "and eventtype != '" + GuestbookResponse.ACCESS_REQUEST +"'");
+ query = em.createNativeQuery("select count(o.id) from GuestbookResponse o where o.dataset_id = " + datasetId+ " and eventtype != '" + GuestbookResponse.ACCESS_REQUEST +"'");
}
return (Long) query.getSingleResult();
}
diff --git a/src/main/java/edu/harvard/iq/dataverse/HarvestingSetsPage.java b/src/main/java/edu/harvard/iq/dataverse/HarvestingSetsPage.java
index 6dbba34920b..0b66b652e0c 100644
--- a/src/main/java/edu/harvard/iq/dataverse/HarvestingSetsPage.java
+++ b/src/main/java/edu/harvard/iq/dataverse/HarvestingSetsPage.java
@@ -30,6 +30,8 @@
import jakarta.faces.view.ViewScoped;
import jakarta.inject.Inject;
import jakarta.inject.Named;
+import java.util.HashMap;
+import java.util.Map;
import org.apache.commons.lang3.StringUtils;
/**
@@ -430,44 +432,92 @@ public boolean isSessionUserAuthenticated() {
return false;
}
+ // The numbers of datasets and deleted/exported records below are used
+ // in rendering rules on the page. They absolutely need to be cached
+ // on the first lookup.
+
+ Map cachedSetInfoNumDatasets = new HashMap<>();
+
public int getSetInfoNumOfDatasets(OAISet oaiSet) {
if (oaiSet.isDefaultSet()) {
return getSetInfoNumOfExported(oaiSet);
}
+ if (cachedSetInfoNumDatasets.get(oaiSet.getSpec()) != null) {
+ return cachedSetInfoNumDatasets.get(oaiSet.getSpec());
+ }
+
String query = oaiSet.getDefinition();
try {
int num = oaiSetService.validateDefinitionQuery(query);
if (num > -1) {
+ cachedSetInfoNumDatasets.put(oaiSet.getSpec(), num);
return num;
}
} catch (OaiSetException ose) {
- // do notghin - will return zero.
+ // do nothing - will return zero.
}
+ cachedSetInfoNumDatasets.put(oaiSet.getSpec(), 0);
return 0;
}
+ Map cachedSetInfoNumExported = new HashMap<>();
+ Integer defaultSetNumExported = null;
+
public int getSetInfoNumOfExported(OAISet oaiSet) {
+ if (oaiSet.isDefaultSet() && defaultSetNumExported != null) {
+ return defaultSetNumExported;
+ } else if (cachedSetInfoNumExported.get(oaiSet.getSpec()) != null) {
+ return cachedSetInfoNumExported.get(oaiSet.getSpec());
+ }
+
List records = oaiRecordService.findActiveOaiRecordsBySetName(oaiSet.getSpec());
+ int num;
+
if (records == null || records.isEmpty()) {
- return 0;
+ num = 0;
+ } else {
+ num = records.size();
}
- return records.size();
+ if (oaiSet.isDefaultSet()) {
+ defaultSetNumExported = num;
+ } else {
+ cachedSetInfoNumExported.put(oaiSet.getSpec(), num);
+ }
+ return num;
}
+ Map cachedSetInfoNumDeleted = new HashMap<>();
+ Integer defaultSetNumDeleted = null;
+
public int getSetInfoNumOfDeleted(OAISet oaiSet) {
+ if (oaiSet.isDefaultSet() && defaultSetNumDeleted != null) {
+ return defaultSetNumDeleted;
+ } else if (cachedSetInfoNumDeleted.get(oaiSet.getSpec()) != null) {
+ return cachedSetInfoNumDeleted.get(oaiSet.getSpec());
+ }
+
List records = oaiRecordService.findDeletedOaiRecordsBySetName(oaiSet.getSpec());
+ int num;
+
if (records == null || records.isEmpty()) {
- return 0;
+ num = 0;
+ } else {
+ num = records.size();
}
- return records.size();
+ if (oaiSet.isDefaultSet()) {
+ defaultSetNumDeleted = num;
+ } else {
+ cachedSetInfoNumDeleted.put(oaiSet.getSpec(), num);
+ }
+ return num;
}
public void validateSetQuery() {
diff --git a/src/main/java/edu/harvard/iq/dataverse/MailServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/MailServiceBean.java
index 72fc6ee6d64..1eee9c65501 100644
--- a/src/main/java/edu/harvard/iq/dataverse/MailServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/MailServiceBean.java
@@ -11,6 +11,7 @@
import edu.harvard.iq.dataverse.branding.BrandingUtil;
import edu.harvard.iq.dataverse.confirmemail.ConfirmEmailServiceBean;
import edu.harvard.iq.dataverse.dataset.DatasetUtil;
+import edu.harvard.iq.dataverse.settings.JvmSettings;
import edu.harvard.iq.dataverse.settings.SettingsServiceBean;
import edu.harvard.iq.dataverse.settings.SettingsServiceBean.Key;
import edu.harvard.iq.dataverse.util.BundleUtil;
@@ -24,11 +25,15 @@
import java.util.Arrays;
import java.util.Date;
import java.util.List;
+import java.util.Objects;
+import java.util.Optional;
import java.util.Set;
+import java.util.logging.Level;
import java.util.logging.Logger;
-import jakarta.annotation.Resource;
import jakarta.ejb.EJB;
import jakarta.ejb.Stateless;
+import jakarta.inject.Inject;
+import jakarta.inject.Named;
import jakarta.mail.Address;
import jakarta.mail.Message;
import jakarta.mail.MessagingException;
@@ -78,77 +83,133 @@ public class MailServiceBean implements java.io.Serializable {
*/
public MailServiceBean() {
}
+
+ /**
+ * Creates a new instance of MailServiceBean with explicit injection, as used during testing.
+ */
+ public MailServiceBean(Session session, SettingsServiceBean settingsService) {
+ this.session = session;
+ this.settingsService = settingsService;
+ }
- @Resource(name = "mail/notifyMailSession")
+ @Inject
+ @Named("mail/systemSession")
private Session session;
public boolean sendSystemEmail(String to, String subject, String messageText) {
return sendSystemEmail(to, subject, messageText, false);
}
-
+
+ /**
+ * Send a system notification to one or multiple recipients by email.
+ * Will skip sending when {@link #getSystemAddress()} doesn't return a configured "from" address.
+ * @param to A comma separated list of one or multiple recipients' addresses. May contain a "personal name" and
+ * the recipients address in <>. See also {@link InternetAddress}.
+ * @param subject The message's subject
+ * @param messageText The message's text
+ * @param isHtmlContent Determine if the message text is formatted using HTML or plain text.
+ * @return Status: true if sent successfully, false otherwise
+ */
public boolean sendSystemEmail(String to, String subject, String messageText, boolean isHtmlContent) {
+ Optional optionalAddress = getSystemAddress();
+ if (optionalAddress.isEmpty()) {
+ logger.fine(() -> "Skipping sending mail to " + to + ", because no system address has been set.");
+ return false;
+ }
+ InternetAddress systemAddress = optionalAddress.get();
- boolean sent = false;
- InternetAddress systemAddress = getSystemAddress();
-
- String body = messageText
- + (isHtmlContent ? BundleUtil.getStringFromBundle("notification.email.closing.html", Arrays.asList(BrandingUtil.getSupportTeamEmailAddress(systemAddress), BrandingUtil.getSupportTeamName(systemAddress)))
- : BundleUtil.getStringFromBundle("notification.email.closing", Arrays.asList(BrandingUtil.getSupportTeamEmailAddress(systemAddress), BrandingUtil.getSupportTeamName(systemAddress))));
+ String body = messageText +
+ BundleUtil.getStringFromBundle(isHtmlContent ? "notification.email.closing.html" : "notification.email.closing",
+ List.of(BrandingUtil.getSupportTeamEmailAddress(systemAddress), BrandingUtil.getSupportTeamName(systemAddress)));
- logger.fine("Sending email to " + to + ". Subject: <<<" + subject + ">>>. Body: " + body);
+ logger.fine(() -> "Sending email to %s. Subject: <<<%s>>>. Body: %s".formatted(to, subject, body));
try {
+ // Since JavaMail 1.6, we have support for UTF-8 mail addresses and do not need to handle these ourselves.
+ InternetAddress[] recipients = InternetAddress.parse(to);
+
MimeMessage msg = new MimeMessage(session);
- if (systemAddress != null) {
- msg.setFrom(systemAddress);
- msg.setSentDate(new Date());
- String[] recipientStrings = to.split(",");
- InternetAddress[] recipients = new InternetAddress[recipientStrings.length];
- for (int i = 0; i < recipients.length; i++) {
- try {
- recipients[i] = new InternetAddress(recipientStrings[i], "", charset);
- } catch (UnsupportedEncodingException ex) {
- logger.severe(ex.getMessage());
- }
- }
- msg.setRecipients(Message.RecipientType.TO, recipients);
- msg.setSubject(subject, charset);
- if (isHtmlContent) {
- msg.setText(body, charset, "html");
- } else {
- msg.setText(body, charset);
- }
-
- try {
- Transport.send(msg, recipients);
- sent = true;
- } catch (MessagingException ssfe) {
- logger.warning("Failed to send mail to: " + to);
- logger.warning("MessagingException Message: " + ssfe);
- }
+ msg.setFrom(systemAddress);
+ msg.setSentDate(new Date());
+ msg.setRecipients(Message.RecipientType.TO, recipients);
+ msg.setSubject(subject, charset);
+ if (isHtmlContent) {
+ msg.setText(body, charset, "html");
} else {
- logger.fine("Skipping sending mail to " + to + ", because the \"no-reply\" address not set (" + Key.SystemEmail + " setting).");
+ msg.setText(body, charset);
}
- } catch (AddressException ae) {
- logger.warning("Failed to send mail to " + to);
- ae.printStackTrace(System.out);
- } catch (MessagingException me) {
- logger.warning("Failed to send mail to " + to);
- me.printStackTrace(System.out);
+
+ Transport.send(msg, recipients);
+ return true;
+ } catch (MessagingException ae) {
+ logger.log(Level.WARNING, "Failed to send mail to %s: %s".formatted(to, ae.getMessage()), ae);
+ logger.info("When UTF-8 characters in recipients: make sure MTA supports it and JVM option " + JvmSettings.MAIL_MTA_SUPPORT_UTF8.getScopedKey() + "=true");
}
- return sent;
+ return false;
}
-
- public InternetAddress getSystemAddress() {
- String systemEmail = settingsService.getValueForKey(Key.SystemEmail);
- return MailUtil.parseSystemAddress(systemEmail);
+
+ /**
+ * Lookup the system mail address ({@code InternetAddress} may contain personal and actual address).
+ * @return The system mail address or an empty {@code Optional} if not configured.
+ */
+ public Optional getSystemAddress() {
+ boolean providedByDB = false;
+ String mailAddress = JvmSettings.SYSTEM_EMAIL.lookupOptional().orElse(null);
+
+ // Try lookup of (deprecated) database setting only if not configured via MPCONFIG
+ if (mailAddress == null) {
+ mailAddress = settingsService.getValueForKey(Key.SystemEmail);
+ // Encourage people to migrate from deprecated setting
+ if (mailAddress != null) {
+ providedByDB = true;
+ logger.warning("The :SystemEmail DB setting has been deprecated, please reconfigure using JVM option " + JvmSettings.SYSTEM_EMAIL.getScopedKey());
+ }
+ }
+
+ try {
+ // Parse and return.
+ return Optional.of(new InternetAddress(Objects.requireNonNull(mailAddress), true));
+ } catch (AddressException e) {
+ logger.log(Level.WARNING, "Could not parse system mail address '%s' provided by %s: "
+ .formatted(mailAddress, providedByDB ? "DB setting" : "JVM option"), e);
+ } catch (NullPointerException e) {
+ // Do not pester the logs - no configuration may mean someone wants to disable mail notifications
+ logger.fine("Could not find a system mail setting in database (key :SystemEmail, deprecated) or JVM option '" + JvmSettings.SYSTEM_EMAIL.getScopedKey() + "'");
+ }
+ // We define the system email address as an optional setting, in case people do not want to enable mail
+ // notifications (like in a development context, but might be useful elsewhere, too).
+ return Optional.empty();
+ }
+
+ /**
+ * Lookup the support team mail address ({@code InternetAddress} may contain personal and actual address).
+ * Will default to return {@code #getSystemAddress} if not configured.
+ * @return Support team mail address
+ */
+ public Optional getSupportAddress() {
+ Optional supportMailAddress = JvmSettings.SUPPORT_EMAIL.lookupOptional();
+ if (supportMailAddress.isPresent()) {
+ try {
+ return Optional.of(new InternetAddress(supportMailAddress.get(), true));
+ } catch (AddressException e) {
+ logger.log(Level.WARNING, "Could not parse support mail address '%s', defaulting to system address: ".formatted(supportMailAddress.get()), e);
+ }
+ }
+ return getSystemAddress();
}
//@Resource(name="mail/notifyMailSession")
public void sendMail(String reply, String to, String cc, String subject, String messageText) {
+ Optional optionalAddress = getSystemAddress();
+ if (optionalAddress.isEmpty()) {
+ logger.fine(() -> "Skipping sending mail to " + to + ", because no system address has been set.");
+ return;
+ }
+ // Always send from system address to avoid email being blocked
+ InternetAddress fromAddress = optionalAddress.get();
+
try {
MimeMessage msg = new MimeMessage(session);
- // Always send from system address to avoid email being blocked
- InternetAddress fromAddress = getSystemAddress();
+
try {
setContactDelegation(reply, fromAddress);
} catch (UnsupportedEncodingException ex) {
@@ -466,18 +527,24 @@ public String getMessageTextBasedOnNotification(UserNotification userNotificatio
case RETURNEDDS:
version = (DatasetVersion) targetObject;
pattern = BundleUtil.getStringFromBundle("notification.email.wasReturnedByReviewer");
- String optionalReturnReason = "";
- /*
- FIXME
- Setting up to add single comment when design completed
- optionalReturnReason = ".";
- if (comment != null && !comment.isEmpty()) {
- optionalReturnReason = ".\n\n" + BundleUtil.getStringFromBundle("wasReturnedReason") + "\n\n" + comment;
- }
- */
+
String[] paramArrayReturnedDataset = {version.getDataset().getDisplayName(), getDatasetDraftLink(version.getDataset()),
- version.getDataset().getOwner().getDisplayName(), getDataverseLink(version.getDataset().getOwner()), optionalReturnReason};
+ version.getDataset().getOwner().getDisplayName(), getDataverseLink(version.getDataset().getOwner())};
messageText += MessageFormat.format(pattern, paramArrayReturnedDataset);
+
+ if (comment != null && !comment.isEmpty()) {
+ messageText += "\n\n" + MessageFormat.format(BundleUtil.getStringFromBundle("notification.email.wasReturnedByReviewerReason"), comment);
+ }
+
+ Dataverse d = (Dataverse) version.getDataset().getOwner();
+ List<String> contactEmailList = new ArrayList<>();
+ for (DataverseContact dc : d.getDataverseContacts()) {
+ contactEmailList.add(dc.getContactEmail());
+ }
+ if (!contactEmailList.isEmpty()) {
+ String contactEmails = String.join(", ", contactEmailList);
+ messageText += "\n\n" + MessageFormat.format(BundleUtil.getStringFromBundle("notification.email.wasReturnedByReviewer.collectionContacts"), contactEmails);
+ }
return messageText;
case WORKFLOW_SUCCESS:
@@ -505,13 +572,12 @@ public String getMessageTextBasedOnNotification(UserNotification userNotificatio
messageText += MessageFormat.format(pattern, paramArrayStatus);
return messageText;
case CREATEACC:
- InternetAddress systemAddress = getSystemAddress();
String accountCreatedMessage = BundleUtil.getStringFromBundle("notification.email.welcome", Arrays.asList(
BrandingUtil.getInstallationBrandName(),
systemConfig.getGuidesBaseUrl(),
systemConfig.getGuidesVersion(),
- BrandingUtil.getSupportTeamName(systemAddress),
- BrandingUtil.getSupportTeamEmailAddress(systemAddress)
+ BrandingUtil.getSupportTeamName(getSystemAddress().orElse(null)),
+ BrandingUtil.getSupportTeamEmailAddress(getSystemAddress().orElse(null))
));
String optionalConfirmEmailAddon = confirmEmailService.optionalConfirmEmailAddonMsg(userNotification.getUser());
accountCreatedMessage += optionalConfirmEmailAddon;
diff --git a/src/main/java/edu/harvard/iq/dataverse/ManageFilePermissionsPage.java b/src/main/java/edu/harvard/iq/dataverse/ManageFilePermissionsPage.java
index ca2f6145cba..1ead0b13cdc 100644
--- a/src/main/java/edu/harvard/iq/dataverse/ManageFilePermissionsPage.java
+++ b/src/main/java/edu/harvard/iq/dataverse/ManageFilePermissionsPage.java
@@ -422,7 +422,10 @@ public void grantAccess(ActionEvent evt) {
// set request(s) granted, if they exist
for (AuthenticatedUser au : roleAssigneeService.getExplicitUsers(roleAssignee)) {
FileAccessRequest far = file.getAccessRequestForAssignee(au);
- far.setStateGranted();
+ //There may not be a request, so do the null check
+ if (far != null) {
+ far.setStateGranted();
+ }
}
datafileService.save(file);
}
diff --git a/src/main/java/edu/harvard/iq/dataverse/NavigationWrapper.java b/src/main/java/edu/harvard/iq/dataverse/NavigationWrapper.java
index 832d7ec19ef..54fb8f211a6 100644
--- a/src/main/java/edu/harvard/iq/dataverse/NavigationWrapper.java
+++ b/src/main/java/edu/harvard/iq/dataverse/NavigationWrapper.java
@@ -16,6 +16,7 @@
import java.util.logging.Logger;
import jakarta.faces.context.FacesContext;
import jakarta.faces.view.ViewScoped;
+import jakarta.ws.rs.core.Response.Status;
import jakarta.inject.Inject;
import jakarta.inject.Named;
import jakarta.servlet.http.HttpServletRequest;
@@ -87,6 +88,10 @@ public String notAuthorized(){
}
}
+ public String tooManyRequests() {
+ return sendError(Status.TOO_MANY_REQUESTS.getStatusCode());
+ }
+
public String notFound() {
return sendError(HttpServletResponse.SC_NOT_FOUND);
}
diff --git a/src/main/java/edu/harvard/iq/dataverse/PermissionServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/PermissionServiceBean.java
index a1de33a764e..8fb762e3e5b 100644
--- a/src/main/java/edu/harvard/iq/dataverse/PermissionServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/PermissionServiceBean.java
@@ -41,6 +41,9 @@
import java.util.stream.Collectors;
import static java.util.stream.Collectors.toList;
import jakarta.persistence.Query;
+import jakarta.persistence.criteria.CriteriaBuilder;
+import jakarta.persistence.criteria.CriteriaQuery;
+import jakarta.persistence.criteria.Root;
/**
* Your one-stop-shop for deciding which user can do what action on which
@@ -448,8 +451,9 @@ private boolean isPublicallyDownloadable(DvObject dvo) {
if (!df.isRestricted()) {
if (df.getOwner().getReleasedVersion() != null) {
- if (df.getOwner().getReleasedVersion().getFileMetadatas() != null) {
- for (FileMetadata fm : df.getOwner().getReleasedVersion().getFileMetadatas()) {
+ List<FileMetadata> fileMetadatas = df.getOwner().getReleasedVersion().getFileMetadatas();
+ if (fileMetadatas != null) {
+ for (FileMetadata fm : fileMetadatas) {
if (df.equals(fm.getDataFile())) {
return true;
}
@@ -837,4 +841,57 @@ public boolean isMatchingWorkflowLock(Dataset d, String userId, String invocatio
return false;
}
+ /**
+ * Checks if a DataverseRequest can download at least one file of the target DatasetVersion.
+ *
+ * @param dataverseRequest DataverseRequest to check
+ * @param datasetVersion DatasetVersion to check
+ * @return boolean indicating whether the user can download at least one file or not
+ */
+ public boolean canDownloadAtLeastOneFile(DataverseRequest dataverseRequest, DatasetVersion datasetVersion) {
+ if (hasUnrestrictedReleasedFiles(datasetVersion)) {
+ return true;
+ }
+ List<FileMetadata> fileMetadatas = datasetVersion.getFileMetadatas();
+ for (FileMetadata fileMetadata : fileMetadatas) {
+ DataFile dataFile = fileMetadata.getDataFile();
+ Set<RoleAssignee> roleAssignees = new HashSet<>(groupService.groupsFor(dataverseRequest, dataFile));
+ roleAssignees.add(dataverseRequest.getUser());
+ if (hasGroupPermissionsFor(roleAssignees, dataFile, EnumSet.of(Permission.DownloadFile))) {
+ return true;
+ }
+ }
+ return false;
+ }
+
+ /**
+ * Checks if a DatasetVersion has unrestricted released files.
+ *
+ * This method is mostly based on {@link #isPublicallyDownloadable(DvObject)} although in this case, instead of basing
+ * the search on a particular file, it searches for the total number of files in the target version that are present
+ * in the released version.
+ *
+ * @param targetDatasetVersion DatasetVersion to check
+ * @return boolean indicating whether the dataset version has released files or not
+ */
+ private boolean hasUnrestrictedReleasedFiles(DatasetVersion targetDatasetVersion) {
+ Dataset targetDataset = targetDatasetVersion.getDataset();
+ if (!targetDataset.isReleased()) {
+ return false;
+ }
+ CriteriaBuilder criteriaBuilder = em.getCriteriaBuilder();
+ CriteriaQuery<Long> criteriaQuery = criteriaBuilder.createQuery(Long.class);
+ Root<DatasetVersion> datasetVersionRoot = criteriaQuery.from(DatasetVersion.class);
+ Root<FileMetadata> fileMetadataRoot = criteriaQuery.from(FileMetadata.class);
+ criteriaQuery
+ .select(criteriaBuilder.count(fileMetadataRoot))
+ .where(criteriaBuilder.and(
+ criteriaBuilder.equal(fileMetadataRoot.get("dataFile").get("restricted"), false),
+ criteriaBuilder.equal(datasetVersionRoot.get("dataset"), targetDataset),
+ criteriaBuilder.equal(datasetVersionRoot.get("versionState"), DatasetVersion.VersionState.RELEASED),
+ fileMetadataRoot.in(targetDatasetVersion.getFileMetadatas()),
+ fileMetadataRoot.in(datasetVersionRoot.get("fileMetadatas"))));
+ Long result = em.createQuery(criteriaQuery).getSingleResult();
+ return result > 0;
+ }
}
diff --git a/src/main/java/edu/harvard/iq/dataverse/S3PackageImporter.java b/src/main/java/edu/harvard/iq/dataverse/S3PackageImporter.java
index 71318a0184a..a387b27d98b 100644
--- a/src/main/java/edu/harvard/iq/dataverse/S3PackageImporter.java
+++ b/src/main/java/edu/harvard/iq/dataverse/S3PackageImporter.java
@@ -17,6 +17,7 @@
import com.amazonaws.services.s3.model.S3Object;
import com.amazonaws.services.s3.model.S3ObjectSummary;
import edu.harvard.iq.dataverse.api.AbstractApiBean;
+import edu.harvard.iq.dataverse.pidproviders.PidProvider;
import edu.harvard.iq.dataverse.settings.SettingsServiceBean;
import edu.harvard.iq.dataverse.util.FileUtil;
import java.io.BufferedReader;
@@ -203,35 +204,21 @@ public DataFile createPackageDataFile(Dataset dataset, String folderName, long t
fmd.setDatasetVersion(dataset.getLatestVersion());
FileUtil.generateS3PackageStorageIdentifier(packageFile);
-
- GlobalIdServiceBean idServiceBean = GlobalIdServiceBean.getBean(packageFile.getProtocol(), commandEngine.getContext());
+ PidProvider pidProvider = commandEngine.getContext().dvObjects().getEffectivePidGenerator(dataset);
if (packageFile.getIdentifier() == null || packageFile.getIdentifier().isEmpty()) {
- String packageIdentifier = idServiceBean.generateDataFileIdentifier(packageFile);
- packageFile.setIdentifier(packageIdentifier);
- }
-
- String nonNullDefaultIfKeyNotFound = "";
- String protocol = commandEngine.getContext().settings().getValueForKey(SettingsServiceBean.Key.Protocol, nonNullDefaultIfKeyNotFound);
- String authority = commandEngine.getContext().settings().getValueForKey(SettingsServiceBean.Key.Authority, nonNullDefaultIfKeyNotFound);
-
- if (packageFile.getProtocol() == null) {
- packageFile.setProtocol(protocol);
- }
- if (packageFile.getAuthority() == null) {
- packageFile.setAuthority(authority);
+ pidProvider.generatePid(packageFile);
}
if (!packageFile.isIdentifierRegistered()) {
String doiRetString = "";
- idServiceBean = GlobalIdServiceBean.getBean(commandEngine.getContext());
try {
- doiRetString = idServiceBean.createIdentifier(packageFile);
+ doiRetString = pidProvider.createIdentifier(packageFile);
} catch (Throwable e) {
}
// Check return value to make sure registration succeeded
- if (!idServiceBean.registerWhenPublished() && doiRetString.contains(packageFile.getIdentifier())) {
+ if (!pidProvider.registerWhenPublished() && doiRetString.contains(packageFile.getIdentifier())) {
packageFile.setIdentifierRegistered(true);
packageFile.setGlobalIdCreateTime(new Date());
}
diff --git a/src/main/java/edu/harvard/iq/dataverse/SendFeedbackDialog.java b/src/main/java/edu/harvard/iq/dataverse/SendFeedbackDialog.java
index 6be768321c4..5a522eb7e45 100644
--- a/src/main/java/edu/harvard/iq/dataverse/SendFeedbackDialog.java
+++ b/src/main/java/edu/harvard/iq/dataverse/SendFeedbackDialog.java
@@ -6,7 +6,7 @@
import edu.harvard.iq.dataverse.settings.JvmSettings;
import edu.harvard.iq.dataverse.settings.SettingsServiceBean;
import edu.harvard.iq.dataverse.util.BundleUtil;
-import edu.harvard.iq.dataverse.util.MailUtil;
+import edu.harvard.iq.dataverse.util.JsfHelper;
import edu.harvard.iq.dataverse.util.SystemConfig;
import java.util.Optional;
import java.util.Random;
@@ -101,8 +101,7 @@ public void initUserInput(ActionEvent ae) {
op1 = Long.valueOf(random.nextInt(10));
op2 = Long.valueOf(random.nextInt(10));
userSum = null;
- String supportEmail = JvmSettings.SUPPORT_EMAIL.lookupOptional().orElse(settingsService.getValueForKey(SettingsServiceBean.Key.SystemEmail));
- systemAddress = MailUtil.parseSystemAddress(supportEmail);
+ systemAddress = mailService.getSupportAddress().orElse(null);
}
public Long getOp1() {
@@ -217,6 +216,7 @@ public String sendMessage() {
}
logger.fine("sending feedback: " + feedback);
mailService.sendMail(feedback.getFromEmail(), feedback.getToEmail(), feedback.getCcEmail(), feedback.getSubject(), feedback.getBody());
+ JsfHelper.addSuccessMessage(BundleUtil.getStringFromBundle("contact.sent"));
return null;
}
diff --git a/src/main/java/edu/harvard/iq/dataverse/SettingsWrapper.java b/src/main/java/edu/harvard/iq/dataverse/SettingsWrapper.java
index 8ab1e87aef2..91bcc508b78 100644
--- a/src/main/java/edu/harvard/iq/dataverse/SettingsWrapper.java
+++ b/src/main/java/edu/harvard/iq/dataverse/SettingsWrapper.java
@@ -6,9 +6,9 @@
package edu.harvard.iq.dataverse;
import edu.harvard.iq.dataverse.branding.BrandingUtil;
-import edu.harvard.iq.dataverse.dataaccess.AbstractRemoteOverlayAccessIO;
import edu.harvard.iq.dataverse.dataaccess.DataAccess;
import edu.harvard.iq.dataverse.dataaccess.GlobusAccessibleStore;
+import edu.harvard.iq.dataverse.dataaccess.StorageIO;
import edu.harvard.iq.dataverse.settings.JvmSettings;
import edu.harvard.iq.dataverse.settings.Setting;
import edu.harvard.iq.dataverse.settings.SettingsServiceBean;
@@ -66,6 +66,9 @@ public class SettingsWrapper implements java.io.Serializable {
@EJB
MetadataBlockServiceBean mdbService;
+
+ @EJB
+ MailServiceBean mailServiceBean;
private Map settingsMap;
@@ -344,7 +347,7 @@ public boolean isDownloadable(FileMetadata fmd) {
if(isGlobusFileDownload()) {
String driverId = DataAccess.getStorageDriverFromIdentifier(fmd.getDataFile().getStorageIdentifier());
- downloadable = downloadable && !AbstractRemoteOverlayAccessIO.isNotDataverseAccessible(driverId);
+ downloadable = downloadable && StorageIO.isDataverseAccessible(driverId);
}
return downloadable;
}
@@ -400,22 +403,15 @@ public boolean isHTTPUpload(){
return httpUpload;
}
- public boolean isDataFilePIDSequentialDependent(){
- if (dataFilePIDSequentialDependent == null) {
- dataFilePIDSequentialDependent = systemConfig.isDataFilePIDSequentialDependent();
- }
- return dataFilePIDSequentialDependent;
- }
-
public String getSupportTeamName() {
- String systemEmail = getValueForKey(SettingsServiceBean.Key.SystemEmail);
- InternetAddress systemAddress = MailUtil.parseSystemAddress(systemEmail);
+ // TODO: should this be replaced with mailServiceBean.getSupportAddress() to expose a configured support team?
+ InternetAddress systemAddress = mailServiceBean.getSystemAddress().orElse(null);
return BrandingUtil.getSupportTeamName(systemAddress);
}
public String getSupportTeamEmail() {
- String systemEmail = getValueForKey(SettingsServiceBean.Key.SystemEmail);
- InternetAddress systemAddress = MailUtil.parseSystemAddress(systemEmail);
+ // TODO: should this be replaced with mailServiceBean.getSupportAddress() to expose a configured support team?
+ InternetAddress systemAddress = mailServiceBean.getSystemAddress().orElse(null);
return BrandingUtil.getSupportTeamEmailAddress(systemAddress) != null ? BrandingUtil.getSupportTeamEmailAddress(systemAddress) : BrandingUtil.getSupportTeamName(systemAddress);
}
@@ -470,23 +466,6 @@ public Map getConfiguredLocales() {
return configuredLocales;
}
- public boolean isDoiInstallation() {
- String protocol = getValueForKey(SettingsServiceBean.Key.Protocol);
- if ("doi".equals(protocol)) {
- return true;
- } else {
- return false;
- }
- }
-
- public boolean isDataCiteInstallation() {
- String protocol = getValueForKey(SettingsServiceBean.Key.DoiProvider);
- if ("DataCite".equals(protocol)) {
- return true;
- } else {
- return false;
- }
- }
public boolean isMakeDataCountDisplayEnabled() {
boolean safeDefaultIfKeyNotFound = (getValueForKey(SettingsServiceBean.Key.MDCLogPath)!=null); //Backward compatible
diff --git a/src/main/java/edu/harvard/iq/dataverse/ThumbnailServiceWrapper.java b/src/main/java/edu/harvard/iq/dataverse/ThumbnailServiceWrapper.java
index ae81a9326c4..b6ab23848e2 100644
--- a/src/main/java/edu/harvard/iq/dataverse/ThumbnailServiceWrapper.java
+++ b/src/main/java/edu/harvard/iq/dataverse/ThumbnailServiceWrapper.java
@@ -5,11 +5,14 @@
*/
package edu.harvard.iq.dataverse;
+import edu.harvard.iq.dataverse.dataaccess.DataAccess;
import edu.harvard.iq.dataverse.dataaccess.ImageThumbConverter;
-
+import edu.harvard.iq.dataverse.dataaccess.StorageIO;
+import edu.harvard.iq.dataverse.dataset.DatasetUtil;
import edu.harvard.iq.dataverse.search.SolrSearchResult;
import edu.harvard.iq.dataverse.util.SystemConfig;
+import java.io.IOException;
import java.util.HashMap;
import java.util.Map;
import java.util.logging.Logger;
@@ -170,17 +173,30 @@ public String getDatasetCardImageAsUrl(Dataset dataset, Long versionId, boolean
if (thumbnailFile == null) {
- // We attempt to auto-select via the optimized, native query-based method
+ boolean hasDatasetLogo = false;
+ StorageIO<Dataset> storageIO = null;
+ try {
+ storageIO = DataAccess.getStorageIO(dataset);
+ if (storageIO.isAuxObjectCached(DatasetUtil.datasetLogoFilenameFinal)) {
+ // If not, return null/use the default, otherwise pass the logo URL
+ hasDatasetLogo = true;
+ }
+ } catch (IOException ioex) {
+ logger.warning("getDatasetCardImageAsUrl(): Failed to initialize dataset StorageIO for "
+ + dataset.getStorageIdentifier() + " (" + ioex.getMessage() + ")");
+ }
+ // If no other logo we attempt to auto-select via the optimized, native
+ // query-based method
// from the DatasetVersionService:
- if (datasetVersionService.getThumbnailByVersionId(versionId) == null) {
+ if (!hasDatasetLogo && datasetVersionService.getThumbnailByVersionId(versionId) == null) {
return null;
}
}
-
String url = SystemConfig.getDataverseSiteUrlStatic() + "/api/datasets/" + dataset.getId() + "/logo";
logger.fine("getDatasetCardImageAsUrl: " + url);
this.dvobjectThumbnailsMap.put(datasetId,url);
return url;
+
}
// it's the responsibility of the user - to make sure the search result
diff --git a/src/main/java/edu/harvard/iq/dataverse/UserServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/UserServiceBean.java
index 93892376edc..d63fcfa3e34 100644
--- a/src/main/java/edu/harvard/iq/dataverse/UserServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/UserServiceBean.java
@@ -147,6 +147,8 @@ private AuthenticatedUser createAuthenticatedUserForView (Object[] dbRowValues,
user.setMutedEmails(Type.tokenizeToSet((String) dbRowValues[15]));
user.setMutedNotifications(Type.tokenizeToSet((String) dbRowValues[15]));
+ user.setRateLimitTier((int)dbRowValues[17]);
+
user.setRoles(roles);
return user;
}
@@ -419,7 +421,7 @@ private List