diff --git a/.env b/.env index 3467f8df73b..e3ececc2e54 100644 --- a/.env +++ b/.env @@ -1,4 +1,4 @@ APP_IMAGE=gdcc/dataverse:unstable POSTGRES_VERSION=13 DATAVERSE_DB_USER=dataverse -SOLR_VERSION=8.11.1 +SOLR_VERSION=9.3.0 diff --git a/.github/workflows/deploy_beta_testing.yml b/.github/workflows/deploy_beta_testing.yml index f858272ca2b..2443ef8b2e0 100644 --- a/.github/workflows/deploy_beta_testing.yml +++ b/.github/workflows/deploy_beta_testing.yml @@ -8,7 +8,6 @@ on: jobs: build: runs-on: ubuntu-latest - environment: beta-testing steps: - uses: actions/checkout@v3 @@ -38,7 +37,6 @@ jobs: deploy-to-payara: needs: build runs-on: ubuntu-latest - environment: beta-testing steps: - uses: actions/checkout@v3 @@ -55,11 +53,11 @@ jobs: - name: Copy war file to remote instance uses: appleboy/scp-action@master with: - host: ${{ secrets.PAYARA_INSTANCE_HOST }} - username: ${{ secrets.PAYARA_INSTANCE_USERNAME }} - key: ${{ secrets.PAYARA_INSTANCE_SSH_PRIVATE_KEY }} + host: ${{ secrets.BETA_PAYARA_INSTANCE_HOST }} + username: ${{ secrets.BETA_PAYARA_INSTANCE_USERNAME }} + key: ${{ secrets.BETA_PAYARA_INSTANCE_SSH_PRIVATE_KEY }} source: './${{ env.war_file }}' - target: '/home/${{ secrets.PAYARA_INSTANCE_USERNAME }}' + target: '/home/${{ secrets.BETA_PAYARA_INSTANCE_USERNAME }}' overwrite: true - name: Execute payara war deployment remotely @@ -67,17 +65,17 @@ jobs: env: INPUT_WAR_FILE: ${{ env.war_file }} with: - host: ${{ secrets.PAYARA_INSTANCE_HOST }} - username: ${{ secrets.PAYARA_INSTANCE_USERNAME }} - key: ${{ secrets.PAYARA_INSTANCE_SSH_PRIVATE_KEY }} + host: ${{ secrets.BETA_PAYARA_INSTANCE_HOST }} + username: ${{ secrets.BETA_PAYARA_INSTANCE_USERNAME }} + key: ${{ secrets.BETA_PAYARA_INSTANCE_SSH_PRIVATE_KEY }} envs: INPUT_WAR_FILE script: | APPLICATION_NAME=dataverse-backend - ASADMIN='/usr/local/payara5/bin/asadmin --user admin' + ASADMIN='/usr/local/payara6/bin/asadmin --user admin' $ASADMIN undeploy $APPLICATION_NAME $ASADMIN stop-domain - rm -rf 
/usr/local/payara5/glassfish/domains/domain1/generated - rm -rf /usr/local/payara5/glassfish/domains/domain1/osgi-cache + rm -rf /usr/local/payara6/glassfish/domains/domain1/generated + rm -rf /usr/local/payara6/glassfish/domains/domain1/osgi-cache $ASADMIN start-domain $ASADMIN deploy --name $APPLICATION_NAME $INPUT_WAR_FILE $ASADMIN stop-domain diff --git a/.github/workflows/shellcheck.yml b/.github/workflows/shellcheck.yml index dc0d19914a1..56f7d648dc4 100644 --- a/.github/workflows/shellcheck.yml +++ b/.github/workflows/shellcheck.yml @@ -42,5 +42,4 @@ jobs: scripts/issues/* scripts/r/* scripts/tests/* - scripts/vagrant/* tests/* diff --git a/.github/workflows/shellspec.yml b/.github/workflows/shellspec.yml index 5c251cfc897..227a74fa00f 100644 --- a/.github/workflows/shellspec.yml +++ b/.github/workflows/shellspec.yml @@ -60,7 +60,7 @@ jobs: shellspec shellspec-macos: name: "MacOS" - runs-on: macos-10.15 + runs-on: macos-latest steps: - name: Install shellspec run: curl -fsSL https://git.io/shellspec | sh -s 0.28.1 --yes diff --git a/.gitignore b/.gitignore index 5a2da73fb2c..7f0d3a2b466 100644 --- a/.gitignore +++ b/.gitignore @@ -18,7 +18,6 @@ GRTAGS .Trashes ehthumbs.db Thumbs.db -.vagrant *.pyc *.swp scripts/api/py_api_wrapper/demo-data/* diff --git a/Vagrantfile b/Vagrantfile deleted file mode 100644 index 8293fbaf5fc..00000000000 --- a/Vagrantfile +++ /dev/null @@ -1,27 +0,0 @@ -# -*- mode: ruby -*- -# vi: set ft=ruby : - -VAGRANTFILE_API_VERSION = "2" - -Vagrant.configure(VAGRANTFILE_API_VERSION) do |config| - config.vm.box = "bento/rockylinux-8.4" - - config.vm.provider "virtualbox" do |vbox| - vbox.cpus = 4 - vbox.memory = 4096 - end - - config.vm.provision "shell", path: "scripts/vagrant/setup.sh" - config.vm.provision "shell", path: "scripts/vagrant/setup-solr.sh" - config.vm.provision "shell", path: "scripts/vagrant/install-dataverse.sh" - - config.vm.network "private_network", type: "dhcp" - config.vm.network "forwarded_port", guest: 80, host: 
8888 - config.vm.network "forwarded_port", guest: 443, host: 9999 - config.vm.network "forwarded_port", guest: 8983, host: 8993 - config.vm.network "forwarded_port", guest: 8080, host: 8088 - config.vm.network "forwarded_port", guest: 8181, host: 8188 - - config.vm.synced_folder ".", "/dataverse" - -end diff --git a/conf/solr/8.11.1/readme.md b/conf/solr/8.11.1/readme.md deleted file mode 100644 index 4457cf9a7df..00000000000 --- a/conf/solr/8.11.1/readme.md +++ /dev/null @@ -1 +0,0 @@ -Please see the dev guide for what to do with Solr config files. \ No newline at end of file diff --git a/conf/solr/8.11.1/schema.xml b/conf/solr/9.3.0/schema.xml similarity index 96% rename from conf/solr/8.11.1/schema.xml rename to conf/solr/9.3.0/schema.xml index ac271c884b8..3711ffeddba 100644 --- a/conf/solr/8.11.1/schema.xml +++ b/conf/solr/9.3.0/schema.xml @@ -23,7 +23,7 @@ For more information, on how to customize this file, please see - http://lucene.apache.org/solr/guide/documents-fields-and-schema-design.html + https://solr.apache.org/guide/solr/latest/indexing-guide/schema-elements.html PERFORMANCE NOTE: this schema includes many optional features and should not be used for benchmarking. To improve performance one could @@ -38,7 +38,7 @@ catchall "text" field, and use that for searching. --> - + - - - - - - - - - + + @@ -163,7 +156,7 @@ - + @@ -200,7 +193,7 @@ - + @@ -208,9 +201,9 @@ - + - + @@ -218,10 +211,10 @@ - + - + @@ -658,7 +651,8 @@ + RESTRICTION: the glob-like pattern in the name attribute must have a "*" + only at the start or the end. --> @@ -666,19 +660,23 @@ - + + + + + + + - - - - + + @@ -724,43 +722,6 @@ field first in an ascending sort and last in a descending sort. 
--> - - - - - - - - - - - - - - - - + - + + + + + + + + + + + + + + + + + + + - - - - - - - 7.3.0 - - - - - - - - - - - - - - - - - - - - ${solr.data.dir:} - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ${solr.lock.type:native} - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ${solr.ulog.dir:} - ${solr.ulog.numVersionBuckets:65536} - - - - - ${solr.autoCommit.maxTime:15000} - false - - - - - - ${solr.autoSoftCommit.maxTime:-1} - - - - - - - - - - - - - - 1024 - - - - - - - - - - - - - - - - - - - - - - - - true - - - - - - 20 - - - 200 - - - - - - - - - - - - - - - - false - - - - - - - - - - - - - - - - - - - - - - explicit - 10 - edismax - 0.075 - - dvName^400 - authorName^180 - dvSubject^190 - dvDescription^180 - dvAffiliation^170 - title^130 - subject^120 - keyword^110 - topicClassValue^100 - dsDescriptionValue^90 - authorAffiliation^80 - publicationCitation^60 - producerName^50 - fileName^30 - fileDescription^30 - variableLabel^20 - variableName^10 - _text_^1.0 - - - dvName^200 - authorName^100 - dvSubject^100 - dvDescription^100 - dvAffiliation^100 - title^75 - subject^75 - keyword^75 - topicClassValue^75 - dsDescriptionValue^75 - authorAffiliation^75 - publicationCitation^75 - producerName^75 - - - - isHarvested:false^25000 - - - - - - - - - - - - - - - - - - explicit - json - true - - - - - - - - explicit - - - - - - _text_ - - - - - - - true - ignored_ - _text_ - - - - - - - - - text_general - - - - - - default - _text_ - solr.DirectSolrSpellChecker - - internal - - 0.5 - - 2 - - 1 - - 5 - - 4 - - 0.01 - - - - - - - - - - - - default - on - true - 10 - 5 - 5 - true - true - 10 - 5 - - - spellcheck - - - - - - - - - - true - - - tvComponent - - - - - - - - - - - - true - false - - - terms - - - - - - - - string - - - - - - explicit - - - elevator - - - - - - - - - - - 100 - - - - - - - - 70 - - 0.5 - - [-\w ,/\n\"']{20,200} - - - - - - - ]]> - ]]> - - - - - - - - - - - - - - - - - - - - - - - - ,, - ,, - ,, - ,, - ,]]> - ]]> - - - - - - 
10 - .,!? - - - - - - - WORD - - - en - US - - - - - - - - - - - - - - [^\w-\.] - _ - - - - - - - yyyy-MM-dd'T'HH:mm:ss.SSSZ - yyyy-MM-dd'T'HH:mm:ss,SSSZ - yyyy-MM-dd'T'HH:mm:ss.SSS - yyyy-MM-dd'T'HH:mm:ss,SSS - yyyy-MM-dd'T'HH:mm:ssZ - yyyy-MM-dd'T'HH:mm:ss - yyyy-MM-dd'T'HH:mmZ - yyyy-MM-dd'T'HH:mm - yyyy-MM-dd HH:mm:ss.SSSZ - yyyy-MM-dd HH:mm:ss,SSSZ - yyyy-MM-dd HH:mm:ss.SSS - yyyy-MM-dd HH:mm:ss,SSS - yyyy-MM-dd HH:mm:ssZ - yyyy-MM-dd HH:mm:ss - yyyy-MM-dd HH:mmZ - yyyy-MM-dd HH:mm - yyyy-MM-dd - - - - - - - - - - - - - - - - - - - - - - - - - - - - - text/plain; charset=UTF-8 - - - - - ${velocity.template.base.dir:} - ${velocity.solr.resource.loader.enabled:true} - ${velocity.params.resource.loader.enabled:false} - - - - - 5 - - - - - - - - - - - - - - + + + + + + + + + 9.7 + + + + + + + + + + + ${solr.data.dir:} + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + ${solr.lock.type:native} + + + + + + + + + + + + + + + + + + + + + ${solr.ulog.dir:} + ${solr.ulog.numVersionBuckets:65536} + + + + + ${solr.autoCommit.maxTime:15000} + false + + + + + + ${solr.autoSoftCommit.maxTime:-1} + + + + + + + + + + + + + + ${solr.max.booleanClauses:1024} + + + + + + + + + + + + + + + + + + + + + + + + true + + + + + + 20 + + + 200 + + + + + + + + + + + + + + + + + + + false + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + explicit + 10 + + edismax + 0.075 + + dvName^400 + authorName^180 + dvSubject^190 + dvDescription^180 + dvAffiliation^170 + title^130 + subject^120 + keyword^110 + topicClassValue^100 + dsDescriptionValue^90 + authorAffiliation^80 + publicationCitation^60 + producerName^50 + fileName^30 + fileDescription^30 + variableLabel^20 + variableName^10 + _text_^1.0 + + + dvName^200 + authorName^100 + dvSubject^100 + dvDescription^100 + dvAffiliation^100 + title^75 + subject^75 + keyword^75 + topicClassValue^75 + dsDescriptionValue^75 + authorAffiliation^75 + publicationCitation^75 + producerName^75 + + + + isHarvested:false^25000 + + + + + 
+ + + explicit + json + true + + + + + + + _text_ + + + + + + + text_general + + + + + + default + _text_ + solr.DirectSolrSpellChecker + + internal + + 0.5 + + 2 + + 1 + + 5 + + 4 + + 0.01 + + + + + + + + + + + + + + + + + + + + + 100 + + + + + + + + 70 + + 0.5 + + [-\w ,/\n\"']{20,200} + + + + + + + ]]> + ]]> + + + + + + + + + + + + + + + + + + + + + + + + ,, + ,, + ,, + ,, + ,]]> + ]]> + + + + + + 10 + .,!? + + + + + + + WORD + + + en + US + + + + + + + + + + + + + [^\w-\.] + _ + + + + + + + yyyy-MM-dd['T'[HH:mm[:ss[.SSS]][z + yyyy-MM-dd['T'[HH:mm[:ss[,SSS]][z + yyyy-MM-dd HH:mm[:ss[.SSS]][z + yyyy-MM-dd HH:mm[:ss[,SSS]][z + [EEE, ]dd MMM yyyy HH:mm[:ss] z + EEEE, dd-MMM-yy HH:mm:ss z + EEE MMM ppd HH:mm:ss [z ]yyyy + + + + + java.lang.String + text_general + + *_str + 256 + + + true + + + java.lang.Boolean + booleans + + + java.util.Date + pdates + + + java.lang.Long + java.lang.Integer + plongs + + + java.lang.Number + pdoubles + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/conf/solr/8.11.1/update-fields.sh b/conf/solr/9.3.0/update-fields.sh similarity index 100% rename from conf/solr/8.11.1/update-fields.sh rename to conf/solr/9.3.0/update-fields.sh diff --git a/conf/vagrant/etc/shibboleth/attribute-map.xml b/conf/vagrant/etc/shibboleth/attribute-map.xml deleted file mode 100644 index f6386b620f5..00000000000 --- a/conf/vagrant/etc/shibboleth/attribute-map.xml +++ /dev/null @@ -1,141 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/conf/vagrant/etc/shibboleth/dataverse-idp-metadata.xml b/conf/vagrant/etc/shibboleth/dataverse-idp-metadata.xml deleted file mode 100644 index 67225b5e670..00000000000 --- a/conf/vagrant/etc/shibboleth/dataverse-idp-metadata.xml +++ /dev/null @@ -1,298 +0,0 @@ - - - - - 
- - - - - - - - - - - - - - - testshib.org - - TestShib Test IdP - TestShib IdP. Use this as a source of attributes - for your test SP. - https://www.testshib.org/images/testshib-transp.png - - - - - - - - MIIEDjCCAvagAwIBAgIBADANBgkqhkiG9w0BAQUFADBnMQswCQYDVQQGEwJVUzEV - MBMGA1UECBMMUGVubnN5bHZhbmlhMRMwEQYDVQQHEwpQaXR0c2J1cmdoMREwDwYD - VQQKEwhUZXN0U2hpYjEZMBcGA1UEAxMQaWRwLnRlc3RzaGliLm9yZzAeFw0wNjA4 - MzAyMTEyMjVaFw0xNjA4MjcyMTEyMjVaMGcxCzAJBgNVBAYTAlVTMRUwEwYDVQQI - EwxQZW5uc3lsdmFuaWExEzARBgNVBAcTClBpdHRzYnVyZ2gxETAPBgNVBAoTCFRl - c3RTaGliMRkwFwYDVQQDExBpZHAudGVzdHNoaWIub3JnMIIBIjANBgkqhkiG9w0B - AQEFAAOCAQ8AMIIBCgKCAQEArYkCGuTmJp9eAOSGHwRJo1SNatB5ZOKqDM9ysg7C - yVTDClcpu93gSP10nH4gkCZOlnESNgttg0r+MqL8tfJC6ybddEFB3YBo8PZajKSe - 3OQ01Ow3yT4I+Wdg1tsTpSge9gEz7SrC07EkYmHuPtd71CHiUaCWDv+xVfUQX0aT - NPFmDixzUjoYzbGDrtAyCqA8f9CN2txIfJnpHE6q6CmKcoLADS4UrNPlhHSzd614 - kR/JYiks0K4kbRqCQF0Dv0P5Di+rEfefC6glV8ysC8dB5/9nb0yh/ojRuJGmgMWH - gWk6h0ihjihqiu4jACovUZ7vVOCgSE5Ipn7OIwqd93zp2wIDAQABo4HEMIHBMB0G - A1UdDgQWBBSsBQ869nh83KqZr5jArr4/7b+QazCBkQYDVR0jBIGJMIGGgBSsBQ86 - 9nh83KqZr5jArr4/7b+Qa6FrpGkwZzELMAkGA1UEBhMCVVMxFTATBgNVBAgTDFBl - bm5zeWx2YW5pYTETMBEGA1UEBxMKUGl0dHNidXJnaDERMA8GA1UEChMIVGVzdFNo - aWIxGTAXBgNVBAMTEGlkcC50ZXN0c2hpYi5vcmeCAQAwDAYDVR0TBAUwAwEB/zAN - BgkqhkiG9w0BAQUFAAOCAQEAjR29PhrCbk8qLN5MFfSVk98t3CT9jHZoYxd8QMRL - I4j7iYQxXiGJTT1FXs1nd4Rha9un+LqTfeMMYqISdDDI6tv8iNpkOAvZZUosVkUo - 93pv1T0RPz35hcHHYq2yee59HJOco2bFlcsH8JBXRSRrJ3Q7Eut+z9uo80JdGNJ4 - /SJy5UorZ8KazGj16lfJhOBXldgrhppQBb0Nq6HKHguqmwRfJ+WkxemZXzhediAj - Geka8nz8JjwxpUjAiSWYKLtJhGEaTqCYxCCX2Dw+dOTqUzHOZ7WKv4JXPK5G/Uhr - 8K/qhmFT2nIQi538n6rVYLeWj8Bbnl+ev0peYzxFyF5sQA== - - - - - - - - - - - - - - - urn:mace:shibboleth:1.0:nameIdentifier - urn:oasis:names:tc:SAML:2.0:nameid-format:transient - - - - - - - - - - - - - - - - MIIEDjCCAvagAwIBAgIBADANBgkqhkiG9w0BAQUFADBnMQswCQYDVQQGEwJVUzEV - MBMGA1UECBMMUGVubnN5bHZhbmlhMRMwEQYDVQQHEwpQaXR0c2J1cmdoMREwDwYD - 
VQQKEwhUZXN0U2hpYjEZMBcGA1UEAxMQaWRwLnRlc3RzaGliLm9yZzAeFw0wNjA4 - MzAyMTEyMjVaFw0xNjA4MjcyMTEyMjVaMGcxCzAJBgNVBAYTAlVTMRUwEwYDVQQI - EwxQZW5uc3lsdmFuaWExEzARBgNVBAcTClBpdHRzYnVyZ2gxETAPBgNVBAoTCFRl - c3RTaGliMRkwFwYDVQQDExBpZHAudGVzdHNoaWIub3JnMIIBIjANBgkqhkiG9w0B - AQEFAAOCAQ8AMIIBCgKCAQEArYkCGuTmJp9eAOSGHwRJo1SNatB5ZOKqDM9ysg7C - yVTDClcpu93gSP10nH4gkCZOlnESNgttg0r+MqL8tfJC6ybddEFB3YBo8PZajKSe - 3OQ01Ow3yT4I+Wdg1tsTpSge9gEz7SrC07EkYmHuPtd71CHiUaCWDv+xVfUQX0aT - NPFmDixzUjoYzbGDrtAyCqA8f9CN2txIfJnpHE6q6CmKcoLADS4UrNPlhHSzd614 - kR/JYiks0K4kbRqCQF0Dv0P5Di+rEfefC6glV8ysC8dB5/9nb0yh/ojRuJGmgMWH - gWk6h0ihjihqiu4jACovUZ7vVOCgSE5Ipn7OIwqd93zp2wIDAQABo4HEMIHBMB0G - A1UdDgQWBBSsBQ869nh83KqZr5jArr4/7b+QazCBkQYDVR0jBIGJMIGGgBSsBQ86 - 9nh83KqZr5jArr4/7b+Qa6FrpGkwZzELMAkGA1UEBhMCVVMxFTATBgNVBAgTDFBl - bm5zeWx2YW5pYTETMBEGA1UEBxMKUGl0dHNidXJnaDERMA8GA1UEChMIVGVzdFNo - aWIxGTAXBgNVBAMTEGlkcC50ZXN0c2hpYi5vcmeCAQAwDAYDVR0TBAUwAwEB/zAN - BgkqhkiG9w0BAQUFAAOCAQEAjR29PhrCbk8qLN5MFfSVk98t3CT9jHZoYxd8QMRL - I4j7iYQxXiGJTT1FXs1nd4Rha9un+LqTfeMMYqISdDDI6tv8iNpkOAvZZUosVkUo - 93pv1T0RPz35hcHHYq2yee59HJOco2bFlcsH8JBXRSRrJ3Q7Eut+z9uo80JdGNJ4 - /SJy5UorZ8KazGj16lfJhOBXldgrhppQBb0Nq6HKHguqmwRfJ+WkxemZXzhediAj - Geka8nz8JjwxpUjAiSWYKLtJhGEaTqCYxCCX2Dw+dOTqUzHOZ7WKv4JXPK5G/Uhr - 8K/qhmFT2nIQi538n6rVYLeWj8Bbnl+ev0peYzxFyF5sQA== - - - - - - - - - - - - - - - - urn:mace:shibboleth:1.0:nameIdentifier - urn:oasis:names:tc:SAML:2.0:nameid-format:transient - - - - - TestShib Two Identity Provider - TestShib Two - http://www.testshib.org/testshib-two/ - - - Nate - Klingenstein - ndk@internet2.edu - - - - - - - - - - - - - - - - - - - - - - - - - TestShib Test SP - TestShib SP. Log into this to test your machine. - Once logged in check that all attributes that you expected have been - released. 
- https://www.testshib.org/images/testshib-transp.png - - - - - - - - MIIEPjCCAyagAwIBAgIBADANBgkqhkiG9w0BAQUFADB3MQswCQYDVQQGEwJVUzEV - MBMGA1UECBMMUGVubnN5bHZhbmlhMRMwEQYDVQQHEwpQaXR0c2J1cmdoMSIwIAYD - VQQKExlUZXN0U2hpYiBTZXJ2aWNlIFByb3ZpZGVyMRgwFgYDVQQDEw9zcC50ZXN0 - c2hpYi5vcmcwHhcNMDYwODMwMjEyNDM5WhcNMTYwODI3MjEyNDM5WjB3MQswCQYD - VQQGEwJVUzEVMBMGA1UECBMMUGVubnN5bHZhbmlhMRMwEQYDVQQHEwpQaXR0c2J1 - cmdoMSIwIAYDVQQKExlUZXN0U2hpYiBTZXJ2aWNlIFByb3ZpZGVyMRgwFgYDVQQD - Ew9zcC50ZXN0c2hpYi5vcmcwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIB - AQDJyR6ZP6MXkQ9z6RRziT0AuCabDd3x1m7nLO9ZRPbr0v1LsU+nnC363jO8nGEq - sqkgiZ/bSsO5lvjEt4ehff57ERio2Qk9cYw8XCgmYccVXKH9M+QVO1MQwErNobWb - AjiVkuhWcwLWQwTDBowfKXI87SA7KR7sFUymNx5z1aoRvk3GM++tiPY6u4shy8c7 - vpWbVfisfTfvef/y+galxjPUQYHmegu7vCbjYP3On0V7/Ivzr+r2aPhp8egxt00Q - XpilNai12LBYV3Nv/lMsUzBeB7+CdXRVjZOHGuQ8mGqEbsj8MBXvcxIKbcpeK5Zi - JCVXPfarzuriM1G5y5QkKW+LAgMBAAGjgdQwgdEwHQYDVR0OBBYEFKB6wPDxwYrY - StNjU5P4b4AjBVQVMIGhBgNVHSMEgZkwgZaAFKB6wPDxwYrYStNjU5P4b4AjBVQV - oXukeTB3MQswCQYDVQQGEwJVUzEVMBMGA1UECBMMUGVubnN5bHZhbmlhMRMwEQYD - VQQHEwpQaXR0c2J1cmdoMSIwIAYDVQQKExlUZXN0U2hpYiBTZXJ2aWNlIFByb3Zp - ZGVyMRgwFgYDVQQDEw9zcC50ZXN0c2hpYi5vcmeCAQAwDAYDVR0TBAUwAwEB/zAN - BgkqhkiG9w0BAQUFAAOCAQEAc06Kgt7ZP6g2TIZgMbFxg6vKwvDL0+2dzF11Onpl - 5sbtkPaNIcj24lQ4vajCrrGKdzHXo9m54BzrdRJ7xDYtw0dbu37l1IZVmiZr12eE - Iay/5YMU+aWP1z70h867ZQ7/7Y4HW345rdiS6EW663oH732wSYNt9kr7/0Uer3KD - 9CuPuOidBacospDaFyfsaJruE99Kd6Eu/w5KLAGG+m0iqENCziDGzVA47TngKz2v - PVA+aokoOyoz3b53qeti77ijatSEoKjxheBWpO+eoJeGq/e49Um3M2ogIX/JAlMa - Inh+vYSYngQB2sx9LGkR9KHaMKNIGCDehk93Xla4pWJx1w== - - - - - - - - - - - - - - - - - - - - - urn:oasis:names:tc:SAML:2.0:nameid-format:transient - urn:mace:shibboleth:1.0:nameIdentifier - - - - - - - - - - - - - - - - - - - - TestShib Two Service Provider - TestShib Two - http://www.testshib.org/testshib-two/ - - - Nate - Klingenstein - ndk@internet2.edu - - - - - - - diff --git a/conf/vagrant/etc/shibboleth/shibboleth2.xml 
b/conf/vagrant/etc/shibboleth/shibboleth2.xml deleted file mode 100644 index 946e73bdf6a..00000000000 --- a/conf/vagrant/etc/shibboleth/shibboleth2.xml +++ /dev/null @@ -1,85 +0,0 @@ - - - - - - - - - - - - - - - - - - - - SAML2 SAML1 - - - - SAML2 Local - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/conf/vagrant/etc/yum.repos.d/epel-apache-maven.repo b/conf/vagrant/etc/yum.repos.d/epel-apache-maven.repo deleted file mode 100644 index 1e0f8200040..00000000000 --- a/conf/vagrant/etc/yum.repos.d/epel-apache-maven.repo +++ /dev/null @@ -1,15 +0,0 @@ -# Place this file in your /etc/yum.repos.d/ directory - -[epel-apache-maven] -name=maven from apache foundation. -baseurl=http://repos.fedorapeople.org/repos/dchen/apache-maven/epel-$releasever/$basearch/ -enabled=1 -skip_if_unavailable=1 -gpgcheck=0 - -[epel-apache-maven-source] -name=maven from apache foundation. - Source -baseurl=http://repos.fedorapeople.org/repos/dchen/apache-maven/epel-$releasever/SRPMS -enabled=0 -skip_if_unavailable=1 -gpgcheck=0 diff --git a/conf/vagrant/etc/yum.repos.d/shibboleth.repo b/conf/vagrant/etc/yum.repos.d/shibboleth.repo deleted file mode 100644 index adf42185d8a..00000000000 --- a/conf/vagrant/etc/yum.repos.d/shibboleth.repo +++ /dev/null @@ -1,9 +0,0 @@ -[shibboleth] -name=Shibboleth (rockylinux8) -# Please report any problems to https://shibboleth.atlassian.net/jira -type=rpm-md -mirrorlist=https://shibboleth.net/cgi-bin/mirrorlist.cgi/rockylinux8 -gpgcheck=1 -gpgkey=https://shibboleth.net/downloads/service-provider/RPMS/repomd.xml.key - https://shibboleth.net/downloads/service-provider/RPMS/cantor.repomd.xml.key -enabled=1 diff --git a/conf/vagrant/var/lib/pgsql/data/pg_hba.conf b/conf/vagrant/var/lib/pgsql/data/pg_hba.conf deleted file mode 100644 index e3244686066..00000000000 --- a/conf/vagrant/var/lib/pgsql/data/pg_hba.conf +++ /dev/null @@ -1,74 +0,0 @@ -# PostgreSQL Client Authentication Configuration File -# 
=================================================== -# -# Refer to the "Client Authentication" section in the -# PostgreSQL documentation for a complete description -# of this file. A short synopsis follows. -# -# This file controls: which hosts are allowed to connect, how clients -# are authenticated, which PostgreSQL user names they can use, which -# databases they can access. Records take one of these forms: -# -# local DATABASE USER METHOD [OPTIONS] -# host DATABASE USER CIDR-ADDRESS METHOD [OPTIONS] -# hostssl DATABASE USER CIDR-ADDRESS METHOD [OPTIONS] -# hostnossl DATABASE USER CIDR-ADDRESS METHOD [OPTIONS] -# -# (The uppercase items must be replaced by actual values.) -# -# The first field is the connection type: "local" is a Unix-domain socket, -# "host" is either a plain or SSL-encrypted TCP/IP socket, "hostssl" is an -# SSL-encrypted TCP/IP socket, and "hostnossl" is a plain TCP/IP socket. -# -# DATABASE can be "all", "sameuser", "samerole", a database name, or -# a comma-separated list thereof. -# -# USER can be "all", a user name, a group name prefixed with "+", or -# a comma-separated list thereof. In both the DATABASE and USER fields -# you can also write a file name prefixed with "@" to include names from -# a separate file. -# -# CIDR-ADDRESS specifies the set of hosts the record matches. -# It is made up of an IP address and a CIDR mask that is an integer -# (between 0 and 32 (IPv4) or 128 (IPv6) inclusive) that specifies -# the number of significant bits in the mask. Alternatively, you can write -# an IP address and netmask in separate columns to specify the set of hosts. -# -# METHOD can be "trust", "reject", "md5", "password", "gss", "sspi", "krb5", -# "ident", "pam", "ldap" or "cert". Note that "password" sends passwords -# in clear text; "md5" is preferred since it sends encrypted passwords. -# -# OPTIONS are a set of options for the authentication in the format -# NAME=VALUE. 
The available options depend on the different authentication -# methods - refer to the "Client Authentication" section in the documentation -# for a list of which options are available for which authentication methods. -# -# Database and user names containing spaces, commas, quotes and other special -# characters must be quoted. Quoting one of the keywords "all", "sameuser" or -# "samerole" makes the name lose its special character, and just match a -# database or username with that name. -# -# This file is read on server startup and when the postmaster receives -# a SIGHUP signal. If you edit the file on a running system, you have -# to SIGHUP the postmaster for the changes to take effect. You can use -# "pg_ctl reload" to do that. - -# Put your actual configuration here -# ---------------------------------- -# -# If you want to allow non-local connections, you need to add more -# "host" records. In that case you will also need to make PostgreSQL listen -# on a non-local interface via the listen_addresses configuration parameter, -# or via the -i or -h command line switches. -# - - - -# TYPE DATABASE USER CIDR-ADDRESS METHOD - -# "local" is for Unix domain socket connections only -local all all trust -# IPv4 local connections: -host all all 127.0.0.1/32 trust -# IPv6 local connections: -host all all ::1/128 trust diff --git a/conf/vagrant/var/www/dataverse/error-documents/503.html b/conf/vagrant/var/www/dataverse/error-documents/503.html deleted file mode 100644 index 95a7dea4107..00000000000 --- a/conf/vagrant/var/www/dataverse/error-documents/503.html +++ /dev/null @@ -1 +0,0 @@ -

Custom "site is unavailable" 503 page.

diff --git a/doc/release-notes/6.0-release-notes.md b/doc/release-notes/6.0-release-notes.md new file mode 100644 index 00000000000..df916216f5b --- /dev/null +++ b/doc/release-notes/6.0-release-notes.md @@ -0,0 +1,300 @@ +# Dataverse 6.0 + +This is a platform upgrade release. Payara, Solr, and Java have been upgraded. No features have been added to the Dataverse software itself. Only a handful of bugs were fixed. + +Thank you to all of the community members who contributed code, suggestions, bug reports, and other assistance across the project! + +## Release Highlights (Major Upgrades, Breaking Changes) + +This release contains major upgrades to core components. Detailed upgrade instructions can be found below. + +### Runtime + +- The required Java version has been increased from version 11 to 17. + - See PR #9764 for details. +- Payara application server has been upgraded to version 6.2023.8. + - This is a required update. + - Please note that Payara Community 5 has reached [end of life](https://www.payara.fish/products/payara-platform-product-lifecycle/) + - See PR #9685 and PR #9795 for details. +- Solr has been upgraded to version 9.3.0. + - See PR #9787 for details. +- PostgreSQL 13 remains the tested and supported version. + - That said, the installer and Flyway have been upgraded to support PostgreSQL 14 and 15. See the [PostgreSQL](https://guides.dataverse.org/en/6.0/installation/prerequisites.html#postgresql) section of the Installation Guide and PR #9877 for details. + +### Development + +- Removal of Vagrant and Docker All In One (docker-aio), deprecated in Dataverse v5.14. See PR #9838 and PR #9685 for details. +- All tests have been migrated to use JUnit 5 exclusively from now on. See PR #9796 for details. + +## Installation + +If this is a new installation, please follow our [Installation Guide](https://guides.dataverse.org/en/latest/installation/). 
Please don't be shy about [asking for help](https://guides.dataverse.org/en/latest/installation/intro.html#getting-help) if you need it! + +Once you are in production, we would be delighted to update our [map of Dataverse installations](https://dataverse.org/installations) around the world to include yours! Please [create an issue](https://github.com/IQSS/dataverse-installations/issues) or email us at support@dataverse.org to join the club! + +You are also very welcome to join the [Global Dataverse Community Consortium](https://dataversecommunity.global) (GDCC). + +## Upgrade Instructions + +Upgrading requires a maintenance window and downtime. Please plan ahead, create backups of your database, etc. + +These instructions assume that you've already upgraded through all the 5.x releases and are now running Dataverse 5.14. + +### Upgrade from Java 11 to Java 17 + +Java 17 is now required for Dataverse. Solr can run under Java 11 or Java 17 but the latter is recommended. In preparation for the Java upgrade, stop both Dataverse/Payara and Solr. + +1. Undeploy Dataverse, if deployed, using the unprivileged service account. + + `sudo -u dataverse /usr/local/payara5/bin/asadmin list-applications` + + `sudo -u dataverse /usr/local/payara5/bin/asadmin undeploy dataverse-5.14` + +1. Stop Payara 5. + + `sudo -u dataverse /usr/local/payara5/bin/asadmin stop-domain` + +1. Stop Solr 8. + + `sudo systemctl stop solr.service` + +1. Install Java 17. + + Assuming you are using RHEL or a derivative such as Rocky Linux: + + `sudo yum install java-17-openjdk` + +1. Set Java 17 as the default. + + Assuming you are using RHEL or a derivative such as Rocky Linux: + + `sudo alternatives --config java` + +1. Test that Java 17 is the default. + + `java -version` + +### Upgrade from Payara 5 to Payara 6 + +If you are running Payara as a non-root user (and you should be!), **remember not to execute the commands below as root**. Use `sudo` to change to that user first. 
For example, `sudo -i -u dataverse` if `dataverse` is your dedicated application user. + +1. Download Payara 6.2023.8. + + `curl -L -O https://nexus.payara.fish/repository/payara-community/fish/payara/distributions/payara/6.2023.8/payara-6.2023.8.zip` + +1. Unzip it to /usr/local (or your preferred location). + + `sudo unzip payara-6.2023.8.zip -d /usr/local/` + +1. Change ownership of the unzipped Payara to your "service" user ("dataverse" by default). + + `sudo chown -R dataverse /usr/local/payara6` + +1. Undeploy Dataverse, if deployed, using the unprivileged service account. + + `sudo -u dataverse /usr/local/payara5/bin/asadmin list-applications` + + `sudo -u dataverse /usr/local/payara5/bin/asadmin undeploy dataverse-5.14` + +1. Stop Payara 5, if running. + + `sudo -u dataverse /usr/local/payara5/bin/asadmin stop-domain` + +1. Copy Dataverse-related lines from Payara 5 to Payara 6 domain.xml. + + `sudo -u dataverse cp /usr/local/payara6/glassfish/domains/domain1/config/domain.xml /usr/local/payara6/glassfish/domains/domain1/config/domain.xml.orig` + + `sudo egrep 'dataverse|doi' /usr/local/payara5/glassfish/domains/domain1/config/domain.xml > lines.txt` + + `sudo vi /usr/local/payara6/glassfish/domains/domain1/config/domain.xml` + + The lines will appear in two sections, examples shown below (but your content will vary). + + Section 1: system properties (under ``) + + ``` + + + + + + ``` + + Note: if you used the Dataverse installer, you won't have a `dataverse.db.password` property. See "Create password aliases" below. 
+ + Section 2: JVM options (under ``, the one under ``, not under ``) + + ``` + -Ddataverse.files.directory=/usr/local/dvn/data + -Ddataverse.files.file.type=file + -Ddataverse.files.file.label=file + -Ddataverse.files.file.directory=/usr/local/dvn/data + -Ddataverse.rserve.host=localhost + -Ddataverse.rserve.port=6311 + -Ddataverse.rserve.user=rserve + -Ddataverse.rserve.password=rserve + -Ddataverse.auth.password-reset-timeout-in-minutes=60 + -Ddataverse.timerServer=true + -Ddataverse.fqdn=dev1.dataverse.org + -Ddataverse.siteUrl=https://dev1.dataverse.org + -Ddataverse.files.storage-driver-id=file + -Ddoi.username=testaccount + -Ddoi.password=notmypassword + -Ddoi.baseurlstring=https://mds.test.datacite.org/ + -Ddoi.dataciterestapiurlstring=https://api.test.datacite.org + ``` + +1. Check the `Xmx` setting in `domain.xml`. + + Under `/usr/local/payara6/glassfish/domains/domain1/config/domain.xml`, check the `Xmx` setting under ``, where you put the JVM options, not the one under ``. Note that there are two such settings, and you want to adjust the one in the stanza with Dataverse options. This sets the JVM heap size; a good rule of thumb is half of your system's total RAM. You may specify the value in MB (`8192m`) or GB (`8g`). + +1. Copy `jhove.conf` and `jhoveConfig.xsd` from Payara 5, edit and change `payara5` to `payara6`. + + `sudo cp /usr/local/payara5/glassfish/domains/domain1/config/jhove* /usr/local/payara6/glassfish/domains/domain1/config/` + + `sudo chown dataverse /usr/local/payara6/glassfish/domains/domain1/config/jhove*` + + `sudo -u dataverse vi /usr/local/payara6/glassfish/domains/domain1/config/jhove.conf` + +1. Copy logos from Payara 5 to Payara 6. + + These logos are for collections (dataverses). + + `sudo -u dataverse cp -r /usr/local/payara5/glassfish/domains/domain1/docroot/logos /usr/local/payara6/glassfish/domains/domain1/docroot` + +1. If you are using Make Data Count (MDC), edit :MDCLogPath. 
+ + Your `:MDCLogPath` database setting might be pointing to a Payara 5 directory such as `/usr/local/payara5/glassfish/domains/domain1/logs`. If so, edit this to be Payara 6. You'll probably want to copy your logs over as well. + +1. Update systemd unit file (or other init system) from `/usr/local/payara5` to `/usr/local/payara6`, if applicable. + +1. Start Payara. + + `sudo -u dataverse /usr/local/payara6/bin/asadmin start-domain` + +1. Create a Java mail resource, replacing "localhost" for mailhost with your mail relay server, and replacing "localhost" for fromaddress with the FQDN of your Dataverse server. + + `sudo -u dataverse /usr/local/payara6/bin/asadmin create-javamail-resource --mailhost "localhost" --mailuser "dataversenotify" --fromaddress "do-not-reply@localhost" mail/notifyMailSession` + +1. Create password aliases for your database, rserve and datacite jvm-options, if you're using them. + + `echo "AS_ADMIN_ALIASPASSWORD=yourDBpassword" > /tmp/dataverse.db.password.txt` + + `sudo -u dataverse /usr/local/payara6/bin/asadmin create-password-alias --passwordfile /tmp/dataverse.db.password.txt` + + When you are prompted "Enter the value for the aliasname operand", enter `dataverse.db.password` + + You should see "Command create-password-alias executed successfully." + + You'll want to perform similar commands for `rserve_password_alias` and `doi_password_alias` if you're using Rserve and/or DataCite. + +1. Enable workaround for FISH-7722. + + The following workaround is for https://github.com/payara/Payara/issues/6337 + + `sudo -u dataverse /usr/local/payara6/bin/asadmin create-jvm-options --add-opens=java.base/java.io=ALL-UNNAMED` + +1. Create the network listener on port 8009. + + `sudo -u dataverse /usr/local/payara6/bin/asadmin create-network-listener --protocol http-listener-1 --listenerport 8009 --jkenabled true jk-connector` + +1. Deploy the Dataverse 6.0 war file. 
+ + `sudo -u dataverse /usr/local/payara6/bin/asadmin deploy /path/to/dataverse-6.0.war` + +1. Check that you get a version number from Dataverse. + + This is just a sanity check that Dataverse has been deployed properly. + + `curl http://localhost:8080/api/info/version` + +1. Perform one final Payara restart to ensure that timers are initialized properly. + + `sudo -u dataverse /usr/local/payara6/bin/asadmin stop-domain` + + `sudo -u dataverse /usr/local/payara6/bin/asadmin start-domain` + +### Upgrade from Solr 8 to 9 + +Solr has been upgraded to Solr 9. You must install Solr fresh and reindex. You cannot use your old `schema.xml` because the format has changed. + +The instructions below are copied from https://guides.dataverse.org/en/6.0/installation/prerequisites.html#installing-solr and tweaked a bit for an upgrade scenario. + +We assume that you already have a user called "solr" (from the instructions above), added during your initial installation of Solr. We also assume that you have already stopped Solr 8 as explained in the instructions above about upgrading Java. + +1. Become the "solr" user and then download and configure Solr. + + `su - solr` + + `cd /usr/local/solr` + + `wget https://archive.apache.org/dist/solr/solr/9.3.0/solr-9.3.0.tgz` + + `tar xvzf solr-9.3.0.tgz` + + `cd solr-9.3.0` + + `cp -r server/solr/configsets/_default server/solr/collection1` + +1. Unzip "dvinstall.zip" from this release. Unzip it into /tmp. Then copy the following files into place. + + `cp /tmp/dvinstall/schema*.xml /usr/local/solr/solr-9.3.0/server/solr/collection1/conf` + + `cp /tmp/dvinstall/solrconfig.xml /usr/local/solr/solr-9.3.0/server/solr/collection1/conf` + +1. A Dataverse installation requires a change to the jetty.xml file that ships with Solr. + + Edit `/usr/local/solr/solr-9.3.0/server/etc/jetty.xml`, increasing `requestHeaderSize` from `8192` to `102400` + +1. Tell Solr to create the core "collection1" on startup. 
+ + `echo "name=collection1" > /usr/local/solr/solr-9.3.0/server/solr/collection1/core.properties` + +1. Update your init script. + + Your init script may be located at `/etc/systemd/system/solr.service`, for example. Update the path to Solr to be `/usr/local/solr/solr-9.3.0`. + +1. Start Solr using your init script and check collection1. + + The collection1 check below should print out fields Dataverse uses like "dsDescription". + + `systemctl start solr.service` + + `curl http://localhost:8983/solr/collection1/schema/fields` + +1. Reindex Solr. + + For details, see https://guides.dataverse.org/en/6.0/admin/solr-search-index.html but here is the reindex command: + + `curl http://localhost:8080/api/admin/index` + +1. If you have custom metadata blocks installed, you must update your Solr `schema.xml` to include your custom fields. + + For details, please see https://guides.dataverse.org/en/6.0/admin/metadatacustomization.html#updating-the-solr-schema + + At a high level you will be copying custom fields from the output of http://localhost:8080/api/admin/index/solr/schema or using a script to automate this. + +## Potential Archiver Incompatibilities with Payara 6 + +The [Google Cloud and DuraCloud archivers](https://guides.dataverse.org/en/5.14/installation/config.html#bagit-export) may not work in Dataverse 6.0. + +This is due to the archivers' dependence on libraries that include classes in `javax.* packages` that are no longer available. If these classes are actually used when the archivers run, the archivers would fail. As these two archivers require additional setup, they have not been tested in 6.0. Community members using these archivers or considering their use are encouraged to test them with 6.0 and report any errors and/or provide fixes for them that can be included in future releases. + +## Bug Fix for Dataset Templates with Custom Terms of Use + +A bug was fixed for the following scenario: + +- Create a template with custom terms. 
+- Set that template as the default. +- Try to create a dataset. +- A 500 error appears before the form to create dataset is even shown. + +For more details, see issue #9825 and PR #9892 + +## Complete List of Changes + +For the complete list of code changes in this release, see the [6.0 Milestone](https://github.com/IQSS/dataverse/milestone/109?closed=1) in GitHub. + +## Getting Help + +For help with upgrading, installing, or general questions please post to the [Dataverse Community Google Group](https://groups.google.com/forum/#!forum/dataverse-community) or email support@dataverse.org. diff --git a/doc/release-notes/8094-java-17.md b/doc/release-notes/8094-java-17.md deleted file mode 100644 index f3c81145465..00000000000 --- a/doc/release-notes/8094-java-17.md +++ /dev/null @@ -1 +0,0 @@ -Java 17 or higher is now required. diff --git a/doc/release-notes/8305-payara6-ee10-v3.md b/doc/release-notes/8305-payara6-ee10-v3.md deleted file mode 100644 index 94369e0211f..00000000000 --- a/doc/release-notes/8305-payara6-ee10-v3.md +++ /dev/null @@ -1,5 +0,0 @@ -Payara has been updated from version 5 to 6. - -Developers, you are encouraged to upgrade to Payara 6 immediately. - -Sysadmins, instructions on how to upgrade production installations will be written as part of https://github.com/IQSS/dataverse/issues/9340 diff --git a/doc/release-notes/9340-payara5to6.md b/doc/release-notes/9340-payara5to6.md deleted file mode 100644 index b2e6702f5e4..00000000000 --- a/doc/release-notes/9340-payara5to6.md +++ /dev/null @@ -1,122 +0,0 @@ -## Upgrade from Payara 5 to Payara 6 - -1. Download Payara 6.2023.8 as of this writing: - - `curl -L -O https://nexus.payara.fish/repository/payara-community/fish/payara/distributions/payara/6.2023.8/payara-6.2023.8.zip` - -1. Unzip it to /usr/local (or your preferred location): - - `sudo unzip payara-6.2023.8.zip -d /usr/local/` - -1. 
Change ownership of the unzipped Payara to your "service" user ("dataverse" by default): - - `sudo chown -R dataverse /usr/local/payara6` - -1. Undeploy Dataverse (if deployed, using the unprivileged service account. Version 5.14 is assumed in the example below): - - `sudo -u dataverse /usr/local/payara5/bin/asadmin list-applications` - - `sudo -u dataverse /usr/local/payara5/bin/asadmin undeploy dataverse-5.14` - -1. Stop Payara 5: - - `sudo -u dataverse /usr/local/payara5/bin/asadmin stop-domain` - -1. Copy Dataverse-related lines from Payara 5 to Payara 6 domain.xml: - - `sudo -u dataverse cp /usr/local/payara6/glassfish/domains/domain1/config/domain.xml /usr/local/payara6/glassfish/domains/domain1/config/domain.xml.orig` - - `sudo egrep 'dataverse|doi' /usr/local/payara5/glassfish/domains/domain1/config/domain.xml > lines.txt` - - `sudo vi /usr/local/payara6/glassfish/domains/domain1/config/domain.xml` - - The lines will appear in two sections, examples shown below (but your content will vary). - - Section 1: system properties (under ``) - - ``` - - - - - - ``` - - Note: if you used the Dataverse installer, you won't have a `dataverse.db.password` property. See "Create password aliases" below. 
- - Section 2: JVM options (under ``, the one under ``, not under ``) - - ``` - -Ddataverse.files.directory=/usr/local/dvn/data - -Ddataverse.files.file.type=file - -Ddataverse.files.file.label=file - -Ddataverse.files.file.directory=/usr/local/dvn/data - -Ddataverse.rserve.host=localhost - -Ddataverse.rserve.port=6311 - -Ddataverse.rserve.user=rserve - -Ddataverse.rserve.password=rserve - -Ddataverse.auth.password-reset-timeout-in-minutes=60 - -Ddataverse.timerServer=true - -Ddataverse.fqdn=dev1.dataverse.org - -Ddataverse.siteUrl=https://dev1.dataverse.org - -Ddataverse.files.storage-driver-id=file - -Ddoi.username=testaccount - -Ddoi.password=notmypassword - -Ddoi.baseurlstring=https://mds.test.datacite.org/ - -Ddoi.dataciterestapiurlstring=https://api.test.datacite.org - ``` - -1. Check the `Xmx` setting in `/usr/local/payara6/glassfish/domains/domain1/config/domain.xml`. (The one under ``, where you put the JVM options, not the one under ``.) Note that there are two such settings, and you want to adjust the one in the stanza with Dataverse options. This sets the JVM heap size; a good rule of thumb is half of your system's total RAM. You may specify the value in MB (`8192m`) or GB (`8g`). - -1. Copy jhove.conf and jhoveConfig.xsd from Payara 5, edit and change payara5 to payara6 - - `sudo cp /usr/local/payara5/glassfish/domains/domain1/config/jhove* /usr/local/payara6/glassfish/domains/domain1/config/` - - `sudo chown dataverse /usr/local/payara6/glassfish/domains/domain1/config/jhove*` - - `sudo -u dataverse vi /usr/local/payara6/glassfish/domains/domain1/config/jhove.conf` - -1. Update systemd unit file (or other init system) from `/usr/local/payara5` to `/usr/local/payara6`, if applicable. - -1. Start Payara: - - `sudo -u dataverse /usr/local/payara6/bin/asadmin start-domain` - -1. 
Create a Java mail resource, replacing "localhost" for mailhost with your mail relay server, and replacing "localhost" for fromaddress with the FQDN of your Dataverse server: - - `sudo -u dataverse /usr/local/payara6/bin/asadmin create-javamail-resource --mailhost "localhost" --mailuser "dataversenotify" --fromaddress "do-not-reply@localhost" mail/notifyMailSession` - -1. Create password aliases for your database, rserve and datacite jvm-options, if you're using them: - - ``` - $ echo "AS_ADMIN_ALIASPASSWORD=yourDBpassword" > /tmp/dataverse.db.password.txt - $ sudo -u dataverse /usr/local/payara6/bin/asadmin create-password-alias --passwordfile /tmp/dataverse.db.password.txt - Enter the value for the aliasname operand> dataverse.db.password - Command create-password-alias executed successfully. - ``` - - You'll want to perform similar commands for `rserve_password_alias` and `doi_password_alias` if you're using Rserve and/or Datacite. - -1. Enable workaround for FISH-7722: - - The following workaround is for https://github.com/payara/Payara/issues/6337 - - `sudo -u dataverse /usr/local/payara6/bin/asadmin create-jvm-options --add-opens=java.base/java.io=ALL-UNNAMED` - -1. Create the network listener on port 8009 - - `sudo -u dataverse /usr/local/payara6/bin/asadmin create-network-listener --protocol http-listener-1 --listenerport 8009 --jkenabled true jk-connector` - -1. Deploy the Dataverse 6.0 warfile: - - `sudo -u dataverse /usr/local/payara6/bin/asadmin deploy /path/to/dataverse-6.0.war` - -1. Check that you get a version number from Dataverse: - - `curl http://localhost:8080/api/info/version` - -1. 
Perform one final Payara restart to ensure that timers are initialized properly: - - `sudo -u dataverse /usr/local/payara6/bin/asadmin stop-domain` - - `sudo -u dataverse /usr/local/payara6/bin/asadmin start-domain` diff --git a/doc/release-notes/9782-juni5-transition.md b/doc/release-notes/9782-juni5-transition.md deleted file mode 100644 index b7ffcc0de0d..00000000000 --- a/doc/release-notes/9782-juni5-transition.md +++ /dev/null @@ -1,7 +0,0 @@ -# Migrating all test to JUnit 5 -With this release, we transition all of our test cases (see `src/test/`) to use JUnit 5 only. -Moving forward from JUnit 4 will allow writing tests in more concise and easier ways. -The tests themselves have not been altered, but updated to match JUnit 5 ways. -They have not been extended or dropped coverage; this is mostly a preparation of things to come in the future. -If you are writing tests in JUnit 4 in your feature branches, you need to migrate. -The development guides section of testing has been updated as well. diff --git a/doc/shib/shib.md b/doc/shib/shib.md index 2c178a93f35..9cff6d827e7 100644 --- a/doc/shib/shib.md +++ b/doc/shib/shib.md @@ -82,11 +82,7 @@ Run `service httpd restart`. ## Update/verify files under /etc/shibboleth -For /etc/shibboleth/shibboleth2.xml use the version from https://github.com/IQSS/dataverse/blob/master/conf/vagrant/etc/shibboleth/shibboleth2.xml but replace "pdurbin.pagekite.me" with the "shibtest.dataverse.org". - -Put https://github.com/IQSS/dataverse/blob/master/conf/vagrant/etc/shibboleth/dataverse-idp-metadata.xml at /etc/shibboleth/dataverse-idp-metadata.xml - -Put https://github.com/IQSS/dataverse/blob/master/conf/vagrant/etc/shibboleth/attribute-map.xml at +Get files from the Installation Guide. After making these changes, run `service shibd restart` and `service httpd restart`. 
diff --git a/scripts/vagrant/counter-processor-config.yaml b/doc/sphinx-guides/source/_static/developers/counter-processor-config.yaml similarity index 100% rename from scripts/vagrant/counter-processor-config.yaml rename to doc/sphinx-guides/source/_static/developers/counter-processor-config.yaml diff --git a/doc/sphinx-guides/source/_static/installation/files/etc/init.d/solr b/doc/sphinx-guides/source/_static/installation/files/etc/init.d/solr index 7ca04cdff3f..9cf8902eb14 100755 --- a/doc/sphinx-guides/source/_static/installation/files/etc/init.d/solr +++ b/doc/sphinx-guides/source/_static/installation/files/etc/init.d/solr @@ -5,7 +5,7 @@ # chkconfig: 35 92 08 # description: Starts and stops Apache Solr -SOLR_DIR="/usr/local/solr/solr-8.11.1" +SOLR_DIR="/usr/local/solr/solr-9.3.0" SOLR_COMMAND="bin/solr" SOLR_ARGS="-m 1g -j jetty.host=127.0.0.1" SOLR_USER=solr diff --git a/doc/sphinx-guides/source/_static/installation/files/etc/systemd/solr.service b/doc/sphinx-guides/source/_static/installation/files/etc/systemd/solr.service index d89ee108377..0b8a8528490 100644 --- a/doc/sphinx-guides/source/_static/installation/files/etc/systemd/solr.service +++ b/doc/sphinx-guides/source/_static/installation/files/etc/systemd/solr.service @@ -5,9 +5,9 @@ After = syslog.target network.target remote-fs.target nss-lookup.target [Service] User = solr Type = forking -WorkingDirectory = /usr/local/solr/solr-8.11.1 -ExecStart = /usr/local/solr/solr-8.11.1/bin/solr start -m 1g -j "jetty.host=127.0.0.1" -ExecStop = /usr/local/solr/solr-8.11.1/bin/solr stop +WorkingDirectory = /usr/local/solr/solr-9.3.0 +ExecStart = /usr/local/solr/solr-9.3.0/bin/solr start -m 1g -j "jetty.host=127.0.0.1" +ExecStop = /usr/local/solr/solr-9.3.0/bin/solr stop LimitNOFILE=65000 LimitNPROC=65000 Restart=on-failure diff --git a/doc/sphinx-guides/source/admin/metadatacustomization.rst b/doc/sphinx-guides/source/admin/metadatacustomization.rst index 53bc32eca3d..4f737bd730b 100644 --- 
a/doc/sphinx-guides/source/admin/metadatacustomization.rst +++ b/doc/sphinx-guides/source/admin/metadatacustomization.rst @@ -414,12 +414,9 @@ Setting Up a Dev Environment for Testing You have several options for setting up a dev environment for testing metadata block changes: - Docker: See :doc:`/container/index`. -- Vagrant: See the :doc:`/developers/tools` section of the Developer Guide. - AWS deployment: See the :doc:`/developers/deployment` section of the Developer Guide. - Full dev environment: See the :doc:`/developers/dev-environment` section of the Developer Guide. -To get a clean environment in Vagrant, you'll be running ``vagrant destroy``. In Docker, you'll use ``docker rm``. For a full dev environment or AWS installation, you might find ``rebuild`` and related scripts at ``scripts/deploy/phoenix.dataverse.org`` useful. - Editing TSV files ~~~~~~~~~~~~~~~~~ @@ -516,7 +513,7 @@ the Solr schema configuration, including any enabled metadata schemas: ``curl "http://localhost:8080/api/admin/index/solr/schema"`` -You can use :download:`update-fields.sh <../../../../conf/solr/8.11.1/update-fields.sh>` to easily add these to the +You can use :download:`update-fields.sh <../../../../conf/solr/9.3.0/update-fields.sh>` to easily add these to the Solr schema you installed for your Dataverse installation. The script needs a target XML file containing your Solr schema. (See the :doc:`/installation/prerequisites/` section of @@ -540,7 +537,7 @@ from some place else than your Dataverse installation). Please note that reconfigurations of your Solr index might require a re-index. Usually release notes indicate a necessary re-index, but for your custom metadata you will need to keep track on your own. 
-Please note also that if you are going to make a pull request updating ``conf/solr/8.11.1/schema.xml`` with fields you have +Please note also that if you are going to make a pull request updating ``conf/solr/9.3.0/schema.xml`` with fields you have added, you should first load all the custom metadata blocks in ``scripts/api/data/metadatablocks`` (including ones you don't care about) to create a complete list of fields. (This might change in the future.) diff --git a/doc/sphinx-guides/source/api/getting-started.rst b/doc/sphinx-guides/source/api/getting-started.rst index 544f0921bd7..a6f6c259a25 100644 --- a/doc/sphinx-guides/source/api/getting-started.rst +++ b/doc/sphinx-guides/source/api/getting-started.rst @@ -11,7 +11,7 @@ Servers You Can Test With Rather than using a production Dataverse installation, API users are welcome to use http://demo.dataverse.org for testing. You can email support@dataverse.org if you have any trouble with this server. -If you would rather have full control over your own test server, deployments to AWS, Docker, Vagrant, and more are covered in the :doc:`/developers/index` and the :doc:`/installation/index`. +If you would rather have full control over your own test server, deployments to AWS, Docker, and more are covered in the :doc:`/developers/index` and the :doc:`/installation/index`. Getting an API Token -------------------- diff --git a/doc/sphinx-guides/source/conf.py b/doc/sphinx-guides/source/conf.py index 2c2ddf1bdf6..7ff17eb45ed 100755 --- a/doc/sphinx-guides/source/conf.py +++ b/doc/sphinx-guides/source/conf.py @@ -66,9 +66,9 @@ # built documents. # # The short X.Y version. -version = '5.14' +version = '6.0' # The full version, including alpha/beta/rc tags. -release = '5.14' +release = '6.0' # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. 
diff --git a/doc/sphinx-guides/source/developers/classic-dev-env.rst b/doc/sphinx-guides/source/developers/classic-dev-env.rst index 6feca558267..062a1bb36f3 100755 --- a/doc/sphinx-guides/source/developers/classic-dev-env.rst +++ b/doc/sphinx-guides/source/developers/classic-dev-env.rst @@ -134,7 +134,7 @@ On Linux, you should just install PostgreSQL using your favorite package manager Install Solr ^^^^^^^^^^^^ -`Solr `_ 8.11.1 is required. +`Solr `_ 9.3.0 is required. To install Solr, execute the following commands: @@ -144,25 +144,25 @@ To install Solr, execute the following commands: ``cd /usr/local/solr`` -``curl -O http://archive.apache.org/dist/lucene/solr/8.11.1/solr-8.11.1.tgz`` +``curl -O http://archive.apache.org/dist/solr/solr/9.3.0/solr-9.3.0.tgz`` -``tar xvfz solr-8.11.1.tgz`` +``tar xvfz solr-9.3.0.tgz`` -``cd solr-8.11.1/server/solr`` +``cd solr-9.3.0/server/solr`` ``cp -r configsets/_default collection1`` -``curl -O https://raw.githubusercontent.com/IQSS/dataverse/develop/conf/solr/8.11.1/schema.xml`` +``curl -O https://raw.githubusercontent.com/IQSS/dataverse/develop/conf/solr/9.3.0/schema.xml`` -``curl -O https://raw.githubusercontent.com/IQSS/dataverse/develop/conf/solr/8.11.1/schema_dv_mdb_fields.xml`` +``curl -O https://raw.githubusercontent.com/IQSS/dataverse/develop/conf/solr/9.3.0/schema_dv_mdb_fields.xml`` ``mv schema*.xml collection1/conf`` -``curl -O https://raw.githubusercontent.com/IQSS/dataverse/develop/conf/solr/8.11.1/solrconfig.xml`` +``curl -O https://raw.githubusercontent.com/IQSS/dataverse/develop/conf/solr/9.3.0/solrconfig.xml`` ``mv solrconfig.xml collection1/conf/solrconfig.xml`` -``cd /usr/local/solr/solr-8.11.1`` +``cd /usr/local/solr/solr-9.3.0`` (Please note that the extra jetty argument below is a security measure to limit connections to Solr to only your computer. For extra security, run a firewall.) 
diff --git a/doc/sphinx-guides/source/developers/dependencies.rst b/doc/sphinx-guides/source/developers/dependencies.rst index 65edfa3ffac..0208c49f90a 100644 --- a/doc/sphinx-guides/source/developers/dependencies.rst +++ b/doc/sphinx-guides/source/developers/dependencies.rst @@ -344,8 +344,7 @@ Repositories ------------ Maven receives all dependencies from *repositories*. These can be public like `Maven Central `_ -and others, but you can also use a private repository on premises or in the cloud. Last but not least, you can use -local repositories, which can live next to your application code (see ``local_lib`` dir within the Dataverse Software codebase). +and others, but you can also use a private repository on premises or in the cloud. Repositories are defined within the Dataverse Software POM like this: @@ -364,11 +363,6 @@ Repositories are defined within the Dataverse Software POM like this: http://repository.primefaces.org default - - dvn.private - Local repository for hosting jars not available from network repositories. - file://${project.basedir}/local_lib - You can also add repositories to your local Maven settings, see `docs `_. diff --git a/doc/sphinx-guides/source/developers/index.rst b/doc/sphinx-guides/source/developers/index.rst index c77ddc13519..3ac9e955ea2 100755 --- a/doc/sphinx-guides/source/developers/index.rst +++ b/doc/sphinx-guides/source/developers/index.rst @@ -27,6 +27,7 @@ Developer Guide deployment containers making-releases + making-library-releases metadataexport tools unf/index diff --git a/doc/sphinx-guides/source/developers/intro.rst b/doc/sphinx-guides/source/developers/intro.rst index 7f4e8c1ba34..4a64c407fc1 100755 --- a/doc/sphinx-guides/source/developers/intro.rst +++ b/doc/sphinx-guides/source/developers/intro.rst @@ -52,7 +52,9 @@ Related Guides If you are a developer who wants to make use of the Dataverse Software APIs, please see the :doc:`/api/index`. 
If you have front-end UI questions, please see the :doc:`/style/index`. -If you are a sysadmin who likes to code, you may be interested in hacking on installation scripts mentioned in the :doc:`/installation/index`. We validate the installation scripts with :doc:`/developers/tools` such as `Vagrant `_ and Docker (see the :doc:`containers` section). +If you are a sysadmin who likes to code, you may be interested in hacking on installation scripts mentioned in the :doc:`/installation/index`. + +If you are a Docker enthusiast, please check out the :doc:`/container/index`. Related Projects ---------------- diff --git a/doc/sphinx-guides/source/developers/make-data-count.rst b/doc/sphinx-guides/source/developers/make-data-count.rst index ada0f13bb2f..8eaa5c0d7f8 100644 --- a/doc/sphinx-guides/source/developers/make-data-count.rst +++ b/doc/sphinx-guides/source/developers/make-data-count.rst @@ -30,13 +30,11 @@ Full Setup The recommended way to work on the Make Data Count feature is to spin up an EC2 instance that has both the Dataverse Software and Counter Processor installed. Go to the :doc:`deployment` page for details on how to spin up an EC2 instance and make sure that your Ansible file is configured to install Counter Processor before running the "create" script. -(Alternatively, you can try installing Counter Processor in Vagrant. :download:`setup-counter-processor.sh <../../../../scripts/vagrant/setup-counter-processor.sh>` might help you get it installed.) - After you have spun to your EC2 instance, set ``:MDCLogPath`` so that the Dataverse installation creates a log for Counter Processor to operate on. For more on this database setting, see the :doc:`/installation/config` section of the Installation Guide. Next you need to have the Dataverse installation add some entries to the log that Counter Processor will operate on. To do this, click on some published datasets and download some files. 
-Next you should run Counter Processor to convert the log into a SUSHI report, which is in JSON format. Before running Counter Processor, you need to put a configuration file into place. As a starting point use :download:`counter-processor-config.yaml <../../../../scripts/vagrant/counter-processor-config.yaml>` and edit the file, paying particular attention to the following settings: +Next you should run Counter Processor to convert the log into a SUSHI report, which is in JSON format. Before running Counter Processor, you need to put a configuration file into place. As a starting point use :download:`counter-processor-config.yaml <../_static/developers/counter-processor-config.yaml>` and edit the file, paying particular attention to the following settings: - ``log_name_pattern`` You might want something like ``/usr/local/payara6/glassfish/domains/domain1/logs/counter_(yyyy-mm-dd).log`` - ``year_month`` You should probably set this to the current month. diff --git a/doc/sphinx-guides/source/developers/making-library-releases.rst b/doc/sphinx-guides/source/developers/making-library-releases.rst new file mode 100755 index 00000000000..63b6eeb1c2a --- /dev/null +++ b/doc/sphinx-guides/source/developers/making-library-releases.rst @@ -0,0 +1,93 @@ +======================= +Making Library Releases +======================= + +.. contents:: |toctitle| + :local: + +Introduction +------------ + +Note: See :doc:`making-releases` for Dataverse itself. 
+ +We release Java libraries to Maven Central that are used by Dataverse (and perhaps `other `_ `software `_!): + +- https://central.sonatype.com/namespace/org.dataverse +- https://central.sonatype.com/namespace/io.gdcc + +We release JavaScript/TypeScript libraries to npm: + +- https://www.npmjs.com/package/@iqss/dataverse-design-system + +Maven Central (Java) +-------------------- + +From the perspective of the Maven Central, we are both `producers `_ because we publish/release libraries there and `consumers `_ because we pull down those libraries (and many others) when we build Dataverse. + +Releasing Existing Libraries to Maven Central +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +If you need to release an existing library, all the setup should be done already. The steps below assume that GitHub Actions are in place to do the heavy lifting for you, such as signing artifacts with GPG. + +Releasing a Snapshot Version to Maven Central +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +`Snapshot `_ releases are published automatically through GitHub Actions (e.g. through a `snapshot workflow `_ for the SWORD library) every time a pull request is merged (or the default branch, typically ``main``, is otherwise updated). + +That is to say, to make a snapshot release, you only need to get one or more commits into the default branch. + +Releasing a Release (Non-Snapshot) Version to Maven Central +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +From a pom.xml it may not be apparent that snapshots like ``6.0-SNAPSHOT`` might be changing under your feet. Browsing the snapshot repository (e.g. our `UNF 6.0-SNAPSHOT `_), may reveal versions changing over time. To finalize the code and stop it from changing, we publish/release what Maven calls a "`release version `_". This will remove ``-SNAPSHOT`` from the version (through an ``mvn`` command). + +Non-snapshot releases (`release `_ versions) are published automatically through GitHub Actions (e.g. 
through a `release workflow `_), kicked off locally by an ``mvn`` command that invokes the `Maven Release Plugin `_. + +First, run a clean: + +``mvn release:clean`` + +Then run a prepare: + +``mvn release:prepare`` + +The prepare step is interactive. You will be prompted for the following information: + +- the release version (e.g. `2.0.0 `_) +- the git tag to create and push (e.g. `sword2-server-2.0.0 `_) +- the next development (snapshot) version (e.g. `2.0.1-SNAPSHOT `_) + +These examples are from the SWORD library. Below is what to expect from the interactive session. In many cases, you can just hit enter to accept the defaults. + +.. code-block:: bash + + [INFO] 5/17 prepare:map-release-versions + What is the release version for "SWORD v2 Common Server Library (forked)"? (sword2-server) 2.0.0: : + [INFO] 6/17 prepare:input-variables + What is the SCM release tag or label for "SWORD v2 Common Server Library (forked)"? (sword2-server) sword2-server-2.0.0: : + [INFO] 7/17 prepare:map-development-versions + What is the new development version for "SWORD v2 Common Server Library (forked)"? (sword2-server) 2.0.1-SNAPSHOT: : + [INFO] 8/17 prepare:rewrite-poms-for-release + +It can take some time for the jar to be visible on Maven Central. You can start by looking on the repo1 server, like this: https://repo1.maven.org/maven2/io/gdcc/sword2-server/2.0.0/ + +Don't bother putting the new version in a pom.xml until you see it on repo1. + +Note that the next snapshot release should be available as well, like this: https://s01.oss.sonatype.org/content/groups/staging/io/gdcc/sword2-server/2.0.1-SNAPSHOT/ + +Releasing a New Library to Maven Central +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +At a high level: + +- Use an existing pom.xml as a starting point. +- Use existing GitHub Actions workflows as a starting point. +- Create secrets in the new library's GitHub repo used by the workflow. 
+- If you need an entire new namespace, look at previous issues such as https://issues.sonatype.org/browse/OSSRH-94575 and https://issues.sonatype.org/browse/OSSRH-94577 + +npm (JavaScript/TypeScript) +--------------------------- + +Currently, publishing `@iqss/dataverse-design-system `_ to npm is done manually. We plan to automate this as part of https://github.com/IQSS/dataverse-frontend/issues/140 + +https://www.npmjs.com/package/js-dataverse is the previous 1.0 version of js-dataverse. No 1.x releases are planned. We plan to publish 2.0 (used by the new frontend) as discussed in https://github.com/IQSS/dataverse-frontend/issues/13 \ No newline at end of file diff --git a/doc/sphinx-guides/source/developers/making-releases.rst b/doc/sphinx-guides/source/developers/making-releases.rst index a2575bb5f50..23c4773a06e 100755 --- a/doc/sphinx-guides/source/developers/making-releases.rst +++ b/doc/sphinx-guides/source/developers/making-releases.rst @@ -8,6 +8,8 @@ Making Releases Introduction ------------ +Note: See :doc:`making-library-releases` for how to publish our libraries to Maven Central. + +See :doc:`version-control` for background on our branching strategy. The steps below describe making both regular releases and hotfix releases. 
diff --git a/doc/sphinx-guides/source/developers/testing.rst b/doc/sphinx-guides/source/developers/testing.rst index 0f2eca84b62..acaeccf4f23 100755 --- a/doc/sphinx-guides/source/developers/testing.rst +++ b/doc/sphinx-guides/source/developers/testing.rst @@ -506,7 +506,6 @@ Browser-Based Testing Installation Testing ~~~~~~~~~~~~~~~~~~~~ -- Run `vagrant up` on a server to test the installer - Work with @donsizemore to automate testing of https://github.com/GlobalDataverseCommunityConsortium/dataverse-ansible Future Work on Load/Performance Testing diff --git a/doc/sphinx-guides/source/developers/tools.rst b/doc/sphinx-guides/source/developers/tools.rst index 238db7ce7b0..a21becd14cf 100755 --- a/doc/sphinx-guides/source/developers/tools.rst +++ b/doc/sphinx-guides/source/developers/tools.rst @@ -25,21 +25,6 @@ Maven With Maven installed you can run ``mvn package`` and ``mvn test`` from the command line. It can be downloaded from https://maven.apache.org -.. _vagrant: - -Vagrant -+++++++ - -Vagrant allows you to spin up a virtual machine running the Dataverse Software on your development workstation. You'll need to install Vagrant from https://www.vagrantup.com and VirtualBox from https://www.virtualbox.org. - -We assume you have already cloned the repo from https://github.com/IQSS/dataverse as explained in the :doc:`/developers/dev-environment` section. - -From the root of the git repo (where the ``Vagrantfile`` is), run ``vagrant up`` and eventually you should be able to reach a Dataverse installation at http://localhost:8888 (the ``forwarded_port`` indicated in the ``Vagrantfile``). - -Please note that running ``vagrant up`` for the first time should run the ``downloads/download.sh`` script for you to download required software such as an app server, Solr, etc. However, these dependencies change over time so it's a place to look if ``vagrant up`` was working but later fails. 
- -On Windows if you see an error like ``/usr/bin/python^M: bad interpreter`` you might need to run ``dos2unix`` on the installation scripts. - PlantUML ++++++++ diff --git a/doc/sphinx-guides/source/installation/prerequisites.rst b/doc/sphinx-guides/source/installation/prerequisites.rst index 0d1610be8bf..1847f1b8f63 100644 --- a/doc/sphinx-guides/source/installation/prerequisites.rst +++ b/doc/sphinx-guides/source/installation/prerequisites.rst @@ -97,10 +97,14 @@ Also note that Payara may utilize more than the default number of file descripto PostgreSQL ---------- +PostgreSQL 13 is recommended because it's the version we test against. Version 10 or higher is required because that's what's `supported by Flyway `_, which we use for database migrations. + +You are welcome to experiment with newer versions of PostgreSQL, but please note that as of PostgreSQL 15, permissions have been restricted on the ``public`` schema (`release notes `_, `EDB blog post `_, `Crunchy Data blog post `_). The Dataverse installer has been updated to restore the old permissions, but this may not be a long term solution. + Installing PostgreSQL ===================== -The application has been tested with PostgreSQL versions up to 13 and version 10+ is required. We recommend installing the latest version that is available for your OS distribution. *For example*, to install PostgreSQL 13 under RHEL7/derivative:: +*For example*, to install PostgreSQL 13 under RHEL7/derivative:: # yum install -y https://download.postgresql.org/pub/repos/yum/reporpms/EL-7-x86_64/pgdg-redhat-repo-latest.noarch.rpm # yum makecache fast @@ -154,12 +158,12 @@ Configuring Database Access for the Dataverse Installation (and the Dataverse So Solr ---- -The Dataverse Software search index is powered by Solr. +The Dataverse software search index is powered by Solr. Supported Versions ================== -The Dataverse Software has been tested with Solr version 8.11.1. 
Future releases in the 8.x series are likely to be compatible; however, this cannot be confirmed until they are officially tested. Major releases above 8.x (e.g. 9.x) are not supported. +The Dataverse software has been tested with Solr version 9.3.0. Future releases in the 9.x series are likely to be compatible. Please get in touch (:ref:`support`) if you are having trouble with a newer version. Installing Solr =============== @@ -174,19 +178,19 @@ Become the ``solr`` user and then download and configure Solr:: su - solr cd /usr/local/solr - wget https://archive.apache.org/dist/lucene/solr/8.11.1/solr-8.11.1.tgz - tar xvzf solr-8.11.1.tgz - cd solr-8.11.1 + wget https://archive.apache.org/dist/solr/solr/9.3.0/solr-9.3.0.tgz + tar xvzf solr-9.3.0.tgz + cd solr-9.3.0 cp -r server/solr/configsets/_default server/solr/collection1 You should already have a "dvinstall.zip" file that you downloaded from https://github.com/IQSS/dataverse/releases . Unzip it into ``/tmp``. Then copy the files into place:: - cp /tmp/dvinstall/schema*.xml /usr/local/solr/solr-8.11.1/server/solr/collection1/conf - cp /tmp/dvinstall/solrconfig.xml /usr/local/solr/solr-8.11.1/server/solr/collection1/conf + cp /tmp/dvinstall/schema*.xml /usr/local/solr/solr-9.3.0/server/solr/collection1/conf + cp /tmp/dvinstall/solrconfig.xml /usr/local/solr/solr-9.3.0/server/solr/collection1/conf Note: The Dataverse Project team has customized Solr to boost results that come from certain indexed elements inside the Dataverse installation, for example prioritizing results from Dataverse collections over Datasets. If you would like to remove this, edit your ``solrconfig.xml`` and remove the ```` element and its contents. If you have ideas about how this boosting could be improved, feel free to contact us through our Google Group https://groups.google.com/forum/#!forum/dataverse-dev . -A Dataverse installation requires a change to the ``jetty.xml`` file that ships with Solr. 
Edit ``/usr/local/solr/solr-8.11.1/server/etc/jetty.xml`` , increasing ``requestHeaderSize`` from ``8192`` to ``102400`` +A Dataverse installation requires a change to the ``jetty.xml`` file that ships with Solr. Edit ``/usr/local/solr/solr-9.3.0/server/etc/jetty.xml`` , increasing ``requestHeaderSize`` from ``8192`` to ``102400`` Solr will warn about needing to increase the number of file descriptors and max processes in a production environment but will still run with defaults. We have increased these values to the recommended levels by adding ulimit -n 65000 to the init script, and the following to ``/etc/security/limits.conf``:: @@ -205,7 +209,7 @@ Solr launches asynchronously and attempts to use the ``lsof`` binary to watch fo Finally, you need to tell Solr to create the core "collection1" on startup:: - echo "name=collection1" > /usr/local/solr/solr-8.11.1/server/solr/collection1/core.properties + echo "name=collection1" > /usr/local/solr/solr-9.3.0/server/solr/collection1/core.properties Solr Init Script ================ diff --git a/doc/sphinx-guides/source/versions.rst b/doc/sphinx-guides/source/versions.rst index d5ffb2acb66..2000a2097f0 100755 --- a/doc/sphinx-guides/source/versions.rst +++ b/doc/sphinx-guides/source/versions.rst @@ -7,7 +7,8 @@ Dataverse Software Documentation Versions This list provides a way to refer to the documentation for previous and future versions of the Dataverse Software. In order to learn more about the updates delivered from one version to another, visit the `Releases `__ page in our GitHub repo. - pre-release `HTML (not final!) `__ and `PDF (experimental!) `__ built from the :doc:`develop ` branch :doc:`(how to contribute!) 
` -- 5.14 +- 6.0 +- `5.14 `__ - `5.13 `__ - `5.12.1 `__ - `5.12 `__ diff --git a/downloads/.gitignore b/downloads/.gitignore deleted file mode 100644 index c21ee37922f..00000000000 --- a/downloads/.gitignore +++ /dev/null @@ -1,4 +0,0 @@ -payara-6.2023.6.zip -solr-7.3.0.tgz -weld-osgi-bundle-2.2.10.Final-glassfish4.jar -schemaSpy_5.0.0.jar diff --git a/downloads/download.sh b/downloads/download.sh deleted file mode 100755 index c5cf4a5f17b..00000000000 --- a/downloads/download.sh +++ /dev/null @@ -1,5 +0,0 @@ -#!/bin/sh -curl -L -O https://nexus.payara.fish/repository/payara-community/fish/payara/distributions/payara/6.2023.8/payara-6.2023.8.zip -curl -L -O https://archive.apache.org/dist/lucene/solr/8.11.1/solr-8.11.1.tgz -curl -L -O https://search.maven.org/remotecontent?filepath=org/jboss/weld/weld-osgi-bundle/2.2.10.Final/weld-osgi-bundle-2.2.10.Final-glassfish4.jar -curl -s -L http://sourceforge.net/projects/schemaspy/files/schemaspy/SchemaSpy%205.0.0/schemaSpy_5.0.0.jar/download > schemaSpy_5.0.0.jar diff --git a/downloads/stata-13-test-files/Stata14TestFile.dta b/downloads/stata-13-test-files/Stata14TestFile.dta deleted file mode 100644 index 6f1c31dc798..00000000000 Binary files a/downloads/stata-13-test-files/Stata14TestFile.dta and /dev/null differ diff --git a/local_lib/com/apicatalog/titanium-json-ld/1.3.0-SNAPSHOT/titanium-json-ld-1.3.0-SNAPSHOT.jar b/local_lib/com/apicatalog/titanium-json-ld/1.3.0-SNAPSHOT/titanium-json-ld-1.3.0-SNAPSHOT.jar deleted file mode 100644 index ee499ae4b76..00000000000 Binary files a/local_lib/com/apicatalog/titanium-json-ld/1.3.0-SNAPSHOT/titanium-json-ld-1.3.0-SNAPSHOT.jar and /dev/null differ diff --git a/local_lib/edu/harvard/iq/dvn/unf5/5.0/unf5-5.0.jar b/local_lib/edu/harvard/iq/dvn/unf5/5.0/unf5-5.0.jar deleted file mode 100644 index dc41f94046f..00000000000 Binary files a/local_lib/edu/harvard/iq/dvn/unf5/5.0/unf5-5.0.jar and /dev/null differ diff --git a/local_lib/edu/harvard/iq/dvn/unf5/5.0/unf5-5.0.jar.md5 
b/local_lib/edu/harvard/iq/dvn/unf5/5.0/unf5-5.0.jar.md5 deleted file mode 100644 index 7018ea4e822..00000000000 --- a/local_lib/edu/harvard/iq/dvn/unf5/5.0/unf5-5.0.jar.md5 +++ /dev/null @@ -1 +0,0 @@ -eeef5c0dc201d1105b9529a51fa8cdab diff --git a/local_lib/edu/harvard/iq/dvn/unf5/5.0/unf5-5.0.jar.sha1 b/local_lib/edu/harvard/iq/dvn/unf5/5.0/unf5-5.0.jar.sha1 deleted file mode 100644 index 97f192f3732..00000000000 --- a/local_lib/edu/harvard/iq/dvn/unf5/5.0/unf5-5.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -1fa716d318920fd59fc63f77965d113decf97355 diff --git a/local_lib/edu/harvard/iq/dvn/unf5/5.0/unf5-5.0.pom b/local_lib/edu/harvard/iq/dvn/unf5/5.0/unf5-5.0.pom deleted file mode 100644 index ea2e4c03f9f..00000000000 --- a/local_lib/edu/harvard/iq/dvn/unf5/5.0/unf5-5.0.pom +++ /dev/null @@ -1,8 +0,0 @@ - - - 4.0.0 - edu.harvard.iq.dvn - unf5 - 5.0 - diff --git a/local_lib/edu/harvard/iq/dvn/unf5/5.0/unf5-5.0.pom.md5 b/local_lib/edu/harvard/iq/dvn/unf5/5.0/unf5-5.0.pom.md5 deleted file mode 100644 index a88cf2a1c02..00000000000 --- a/local_lib/edu/harvard/iq/dvn/unf5/5.0/unf5-5.0.pom.md5 +++ /dev/null @@ -1 +0,0 @@ -2df5dac09375e1e7fcd66c705d9ca2ef diff --git a/local_lib/edu/harvard/iq/dvn/unf5/5.0/unf5-5.0.pom.sha1 b/local_lib/edu/harvard/iq/dvn/unf5/5.0/unf5-5.0.pom.sha1 deleted file mode 100644 index 967b977b79e..00000000000 --- a/local_lib/edu/harvard/iq/dvn/unf5/5.0/unf5-5.0.pom.sha1 +++ /dev/null @@ -1 +0,0 @@ -431cd55e2e9379677d14e402dd3c474bb7be4ac9 diff --git a/local_lib/net/handle/handle/8.1.1/handle-8.1.1.jar b/local_lib/net/handle/handle/8.1.1/handle-8.1.1.jar deleted file mode 100644 index 1f8e1c3eb12..00000000000 Binary files a/local_lib/net/handle/handle/8.1.1/handle-8.1.1.jar and /dev/null differ diff --git a/local_lib/net/handle/handle/8.1.1/handle-8.1.1.pom b/local_lib/net/handle/handle/8.1.1/handle-8.1.1.pom deleted file mode 100644 index e3c09349172..00000000000 --- a/local_lib/net/handle/handle/8.1.1/handle-8.1.1.pom +++ /dev/null @@ -1,9 +0,0 @@ - 
- - 4.0.0 - net.handle - handle - 8.1.1 - POM was created from install:install-file - diff --git a/local_lib/nom/tam/fits/fits/2012-10-25-generated/fits-2012-10-25-generated.jar b/local_lib/nom/tam/fits/fits/2012-10-25-generated/fits-2012-10-25-generated.jar deleted file mode 100644 index b3bddd62c24..00000000000 Binary files a/local_lib/nom/tam/fits/fits/2012-10-25-generated/fits-2012-10-25-generated.jar and /dev/null differ diff --git a/local_lib/nom/tam/fits/fits/2012-10-25-generated/fits-2012-10-25-generated.jar.md5 b/local_lib/nom/tam/fits/fits/2012-10-25-generated/fits-2012-10-25-generated.jar.md5 deleted file mode 100644 index 576062f55a1..00000000000 --- a/local_lib/nom/tam/fits/fits/2012-10-25-generated/fits-2012-10-25-generated.jar.md5 +++ /dev/null @@ -1 +0,0 @@ -b0abb2fee242c479f305f47352600bbf diff --git a/local_lib/nom/tam/fits/fits/2012-10-25-generated/fits-2012-10-25-generated.jar.sha1 b/local_lib/nom/tam/fits/fits/2012-10-25-generated/fits-2012-10-25-generated.jar.sha1 deleted file mode 100644 index e81e8450ef0..00000000000 --- a/local_lib/nom/tam/fits/fits/2012-10-25-generated/fits-2012-10-25-generated.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -9643e138cb5ed2684838e4b4faa118adfb2ecb4b diff --git a/local_lib/nom/tam/fits/fits/2012-10-25-generated/fits-2012-10-25-generated.pom b/local_lib/nom/tam/fits/fits/2012-10-25-generated/fits-2012-10-25-generated.pom deleted file mode 100644 index b57cd67278b..00000000000 --- a/local_lib/nom/tam/fits/fits/2012-10-25-generated/fits-2012-10-25-generated.pom +++ /dev/null @@ -1,8 +0,0 @@ - - - 4.0.0 - nom.tam.fits - fits - 2012-10-25-generated - diff --git a/local_lib/nom/tam/fits/fits/2012-10-25-generated/fits-2012-10-25-generated.pom.md5 b/local_lib/nom/tam/fits/fits/2012-10-25-generated/fits-2012-10-25-generated.pom.md5 deleted file mode 100644 index 777b4df3325..00000000000 --- a/local_lib/nom/tam/fits/fits/2012-10-25-generated/fits-2012-10-25-generated.pom.md5 +++ /dev/null @@ -1 +0,0 @@ 
-23ca47c46df791f220a87cfef3b2190c diff --git a/local_lib/nom/tam/fits/fits/2012-10-25-generated/fits-2012-10-25-generated.pom.sha1 b/local_lib/nom/tam/fits/fits/2012-10-25-generated/fits-2012-10-25-generated.pom.sha1 deleted file mode 100644 index b5f41fd1a69..00000000000 --- a/local_lib/nom/tam/fits/fits/2012-10-25-generated/fits-2012-10-25-generated.pom.sha1 +++ /dev/null @@ -1 +0,0 @@ -c1ec9dfbbc72dc4623d309d772b804e47284ee27 diff --git a/local_lib/nom/tam/fits/fits/maven-metadata.xml b/local_lib/nom/tam/fits/fits/maven-metadata.xml deleted file mode 100644 index 4fc3254df3f..00000000000 --- a/local_lib/nom/tam/fits/fits/maven-metadata.xml +++ /dev/null @@ -1,12 +0,0 @@ - - - nom.tam.fits - fits - - 2012-10-25-generated - - 2012-10-25-generated - - 20130925190525 - - diff --git a/local_lib/nom/tam/fits/fits/maven-metadata.xml.md5 b/local_lib/nom/tam/fits/fits/maven-metadata.xml.md5 deleted file mode 100644 index b6d7e4a726f..00000000000 --- a/local_lib/nom/tam/fits/fits/maven-metadata.xml.md5 +++ /dev/null @@ -1 +0,0 @@ -545c78160393b4c80e40377f2a7cf406 \ No newline at end of file diff --git a/local_lib/nom/tam/fits/fits/maven-metadata.xml.sha1 b/local_lib/nom/tam/fits/fits/maven-metadata.xml.sha1 deleted file mode 100644 index 188cf8ae044..00000000000 --- a/local_lib/nom/tam/fits/fits/maven-metadata.xml.sha1 +++ /dev/null @@ -1 +0,0 @@ -9cf56b8ef3f2bacdc669c2c7cdcd7cd50ed38dbb \ No newline at end of file diff --git a/local_lib/org/dataverse/unf/6.0/unf-6.0.jar b/local_lib/org/dataverse/unf/6.0/unf-6.0.jar deleted file mode 100644 index d2738e2dadd..00000000000 Binary files a/local_lib/org/dataverse/unf/6.0/unf-6.0.jar and /dev/null differ diff --git a/local_lib/org/dataverse/unf/6.0/unf-6.0.jar.md5 b/local_lib/org/dataverse/unf/6.0/unf-6.0.jar.md5 deleted file mode 100644 index 04ca3e73ce8..00000000000 --- a/local_lib/org/dataverse/unf/6.0/unf-6.0.jar.md5 +++ /dev/null @@ -1 +0,0 @@ -bd9b84a9ad737a81a2699ab81541a901 diff --git 
a/local_lib/org/dataverse/unf/6.0/unf-6.0.jar.sha1 b/local_lib/org/dataverse/unf/6.0/unf-6.0.jar.sha1 deleted file mode 100644 index a48cef32570..00000000000 --- a/local_lib/org/dataverse/unf/6.0/unf-6.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -4cad279c362e4c5c17a2058dc2c8f2fc97c76bf8 diff --git a/local_lib/org/dataverse/unf/6.0/unf-6.0.pom b/local_lib/org/dataverse/unf/6.0/unf-6.0.pom deleted file mode 100644 index 06f1508723f..00000000000 --- a/local_lib/org/dataverse/unf/6.0/unf-6.0.pom +++ /dev/null @@ -1,8 +0,0 @@ - - - 4.0.0 - org.dataverse - unf - 6.0 - diff --git a/local_lib/org/dataverse/unf/6.0/unf-6.0.pom.md5 b/local_lib/org/dataverse/unf/6.0/unf-6.0.pom.md5 deleted file mode 100644 index 138bc9c95f6..00000000000 --- a/local_lib/org/dataverse/unf/6.0/unf-6.0.pom.md5 +++ /dev/null @@ -1 +0,0 @@ -230c5b1f5ae71bb2fe80ef9e7209f681 diff --git a/local_lib/org/dataverse/unf/6.0/unf-6.0.pom.sha1 b/local_lib/org/dataverse/unf/6.0/unf-6.0.pom.sha1 deleted file mode 100644 index 689e8045418..00000000000 --- a/local_lib/org/dataverse/unf/6.0/unf-6.0.pom.sha1 +++ /dev/null @@ -1 +0,0 @@ -286b819f2fc7432a94b5940c6171be1589f66a37 diff --git a/modules/container-configbaker/Dockerfile b/modules/container-configbaker/Dockerfile index cbda948db14..564216b3572 100644 --- a/modules/container-configbaker/Dockerfile +++ b/modules/container-configbaker/Dockerfile @@ -33,6 +33,7 @@ RUN chmod +x ${SCRIPT_DIR}/*.sh ${BOOTSTRAP_DIR}/*/*.sh # Copy the Solr config bits COPY --from=solr /opt/solr/server/solr/configsets/_default ${SOLR_TEMPLATE}/ COPY maven/solr/*.xml ${SOLR_TEMPLATE}/conf/ +RUN rm ${SOLR_TEMPLATE}/conf/managed-schema.xml # Copy the data from scripts/api that provide the common base setup you'd get from the installer. 
# ".dockerignore" will take care of taking only the bare necessities diff --git a/modules/container-configbaker/assembly.xml b/modules/container-configbaker/assembly.xml index f5b309175ed..3285eef510a 100644 --- a/modules/container-configbaker/assembly.xml +++ b/modules/container-configbaker/assembly.xml @@ -8,7 +8,7 @@ - conf/solr/8.11.1 + conf/solr/9.3.0 solr @@ -43,4 +43,4 @@ - \ No newline at end of file + diff --git a/modules/dataverse-parent/pom.xml b/modules/dataverse-parent/pom.xml index d82229fe3d2..c45d59e4f5f 100644 --- a/modules/dataverse-parent/pom.xml +++ b/modules/dataverse-parent/pom.xml @@ -131,7 +131,7 @@ - 5.14 + 6.0 17 UTF-8 @@ -149,8 +149,8 @@ 6.2023.8 - 42.5.1 - 8.11.1 + 42.6.0 + 9.3.0 1.12.290 0.177.0 @@ -165,7 +165,7 @@ 4.4.14 - 5.0.0 + 5.1.0 1.15.0 @@ -414,11 +414,6 @@ Unidata All https://artifacts.unidata.ucar.edu/repository/unidata-all/ - - dvn.private - Local repository for hosting jars not available from network repositories. - file://${project.basedir}/local_lib - oss-sonatype diff --git a/pom.xml b/pom.xml index f050176edfd..7ba22d2a076 100644 --- a/pom.xml +++ b/pom.xml @@ -26,7 +26,7 @@ war 1.2.18.4 - 8.5.10 + 9.21.2 1.20.1 0.8.7 5.2.1 @@ -96,7 +96,7 @@ io.gdcc sword2-server - 2.0.0-SNAPSHOT + 2.0.0 @@ -169,7 +169,7 @@ com.google.guava guava - 29.0-jre + 32.1.2-jre jar @@ -283,29 +283,23 @@ org.apache.solr solr-solrj - 8.11.1 + 9.3.0 colt colt 1.2.0 - + - nom.tam.fits - fits - 2012-10-25-generated + gov.nasa.gsfc.heasarc + nom-tam-fits + 1.12.0 net.handle - handle - 8.1.1 - - - - edu.harvard.iq.dvn - unf5 - 5.0 + handle-client + 9.3.1 diff --git a/scripts/installer/Makefile b/scripts/installer/Makefile index a1fbfab782e..399bc65168a 100644 --- a/scripts/installer/Makefile +++ b/scripts/installer/Makefile @@ -55,13 +55,13 @@ ${JHOVE_SCHEMA}: ../../conf/jhove/jhoveConfig.xsd ${INSTALLER_ZIP_DIR} @echo copying jhove schema file /bin/cp ../../conf/jhove/jhoveConfig.xsd ${INSTALLER_ZIP_DIR} -${SOLR_SCHEMA}: 
../../conf/solr/8.11.1/schema.xml ../../conf/solr/8.11.1/update-fields.sh ${INSTALLER_ZIP_DIR} +${SOLR_SCHEMA}: ../../conf/solr/9.3.0/schema.xml ../../conf/solr/9.3.0/update-fields.sh ${INSTALLER_ZIP_DIR} @echo copying Solr schema file - /bin/cp ../../conf/solr/8.11.1/schema.xml ../../conf/solr/8.11.1/update-fields.sh ${INSTALLER_ZIP_DIR} + /bin/cp ../../conf/solr/9.3.0/schema.xml ../../conf/solr/9.3.0/update-fields.sh ${INSTALLER_ZIP_DIR} -${SOLR_CONFIG}: ../../conf/solr/8.11.1/solrconfig.xml ${INSTALLER_ZIP_DIR} +${SOLR_CONFIG}: ../../conf/solr/9.3.0/solrconfig.xml ${INSTALLER_ZIP_DIR} @echo copying Solr config file - /bin/cp ../../conf/solr/8.11.1/solrconfig.xml ${INSTALLER_ZIP_DIR} + /bin/cp ../../conf/solr/9.3.0/solrconfig.xml ${INSTALLER_ZIP_DIR} ${PYTHON_FILES}: README_python.txt install.py installConfig.py installAppServer.py installUtils.py requirements.txt default.config interactive.config ${INSTALLER_ZIP_DIR} @echo copying Python installer files diff --git a/scripts/installer/install.py b/scripts/installer/install.py index 700c70dbc28..3aedbd8c6ad 100644 --- a/scripts/installer/install.py +++ b/scripts/installer/install.py @@ -380,12 +380,13 @@ print("Can't connect to PostgresQL as the admin user.\n") sys.exit("Is the server running, have you adjusted pg_hba.conf, etc?") - # 3b. get the Postgres version (do we need it still?) + # 3b. get the Postgres version for new permissions model in versions 15+ try: - pg_full_version = conn.server_version - print("PostgresQL version: "+str(pg_full_version)) + pg_full_version = str(conn.server_version) + pg_major_version = pg_full_version[0:2] + print("PostgreSQL version: "+pg_major_version) except: - print("Warning: Couldn't determine PostgresQL version.") + print("Warning: Couldn't determine PostgreSQL version.") conn.close() # 3c. create role: @@ -410,7 +411,9 @@ else: sys.exit("Couldn't create database or database already exists.\n") - conn_cmd = "GRANT ALL PRIVILEGES on DATABASE "+pgDb+" to "+pgUser+";" + # 3e. 
set permissions: + + conn_cmd = "GRANT CREATE PRIVILEGES on DATABASE "+pgDb+" to "+pgUser+";" try: cur.execute(conn_cmd) except: @@ -418,6 +421,19 @@ cur.close() conn.close() + if int(pg_major_version) >= 15: + conn_cmd = "GRANT ALL ON SCHEMA public TO "+pgUser+";" + print("PostgreSQL 15 or higher detected. Running " + conn_cmd) + try: + cur.execute(conn_cmd) + except: + if force: + print("WARNING: failed to grant permissions on schema public - continuing, since the --force option was specified") + else: + sys.exit("Couldn't grant privileges on schema public to "+pgUser) + cur.close() + conn.close() + print("Database and role created!") if pgOnly: print("postgres-only setup complete.") diff --git a/scripts/vagrant/install-dataverse.sh b/scripts/vagrant/install-dataverse.sh deleted file mode 100644 index c9873f7d3ec..00000000000 --- a/scripts/vagrant/install-dataverse.sh +++ /dev/null @@ -1,31 +0,0 @@ -#!/usr/bin/env bash - -if [ ! -z "$1" ]; then - MAILSERVER=$1 - MAILSERVER_ARG="--mailserver $MAILSERVER" -fi -WAR=/dataverse/target/dataverse*.war -if [ ! -f $WAR ]; then - echo "no war file found... 
building" - #echo "Installing nss on CentOS 6 to avoid java.security.KeyException while building war file: https://github.com/IQSS/dataverse/issues/2744" - #yum install -y nss - su $SUDO_USER -s /bin/sh -c "cd /dataverse && source /etc/profile.d/maven.sh && mvn -q package" -fi -cd /dataverse/scripts/installer - -# move any pre-existing `default.config` file out of the way to avoid overwriting -pid=$$ -if [ -e default.config ]; then - cp default.config tmp-${pid}-default.config -fi - -# Switch to newer Python-based installer -python3 ./install.py --noninteractive --config_file="default.config" - -if [ -e tmp-${pid}-default.config ]; then # if we moved it out, move it back - mv -f tmp-${pid}-default.config default.config -fi - -echo "If "vagrant up" was successful (check output above) Dataverse is running on port 8080 of the Linux machine running within Vagrant, but this port has been forwarded to port 8088 of the computer you ran "vagrant up" on. For this reason you should go to http://localhost:8088 to see the Dataverse app running." - -echo "Please also note that the installation script has now started Payara, but has not set up an autostart mechanism for it.\nTherefore, the next time this VM is started, Payara must be started manually.\nSee https://guides.dataverse.org/en/latest/installation/prerequisites.html#launching-payara-on-system-boot for details." 
diff --git a/scripts/vagrant/rpmbuild.sh b/scripts/vagrant/rpmbuild.sh deleted file mode 100755 index f10830afb5b..00000000000 --- a/scripts/vagrant/rpmbuild.sh +++ /dev/null @@ -1,3 +0,0 @@ -#!/bin/sh -rpm -Uvh http://dl.fedoraproject.org/pub/epel/7/x86_64/e/epel-release-7-7.noarch.rpm -yum install -y rpm-build httpd-devel libapreq2-devel R-devel diff --git a/scripts/vagrant/setup-counter-processor.sh b/scripts/vagrant/setup-counter-processor.sh deleted file mode 100755 index a418e8d6251..00000000000 --- a/scripts/vagrant/setup-counter-processor.sh +++ /dev/null @@ -1,33 +0,0 @@ -#!/bin/bash -echo "Setting up counter-processor" -echo "Installing dependencies" -yum -y install unzip vim-enhanced -yum install -y https://dl.fedoraproject.org/pub/epel/epel-release-latest-7.noarch.rpm -# EPEL provides Python 3.6.6, new enough (3.6.4 in .python-version) -yum -y install python36 jq -# "ensurepip" tip from https://stackoverflow.com/questions/50408941/recommended-way-to-install-pip3-on-centos7/52518512#52518512 -python3.6 -m ensurepip -# FIXME: actually use this dedicated "counter" user. -COUNTER_USER=counter -echo "Ensuring Unix user '$COUNTER_USER' exists" -useradd $COUNTER_USER || : -COMMIT='7974dad259465ba196ef639f48dea007cae8f9ac' -UNZIPPED_DIR="counter-processor-$COMMIT" -if [ ! -e $UNZIPPED_DIR ]; then - ZIP_FILE="${COMMIT}.zip" - echo "Downloading and unzipping $ZIP_FILE" - wget https://github.com/CDLUC3/counter-processor/archive/$ZIP_FILE - unzip $ZIP_FILE -fi -cd $UNZIPPED_DIR -echo "Installation of the GeoLite2 country database for counter-processor can no longer be automated. See the Installation Guide for the manual installation process." 
-pip3 install -r requirements.txt -# For now, parsing sample_logs/counter_2018-05-08.log -for i in `echo {00..31}`; do - # avoid errors like: No such file or directory: 'sample_logs/counter_2018-05-01.log' - touch sample_logs/counter_2018-05-$i.log -done -#LOG_GLOB="sample_logs/counter_2018-05-*.log" -#START_DATE="2018-05-08" -#END_DATE="2018-05-09" -CONFIG_FILE=/dataverse/scripts/vagrant/counter-processor-config.yaml python3.6 main.py diff --git a/scripts/vagrant/setup-solr.sh b/scripts/vagrant/setup-solr.sh deleted file mode 100755 index 70d3fc632a7..00000000000 --- a/scripts/vagrant/setup-solr.sh +++ /dev/null @@ -1,18 +0,0 @@ -#!/bin/bash -echo "Setting up Solr" -dnf install -qy lsof -SOLR_USER=solr -SOLR_HOME=/usr/local/solr -mkdir $SOLR_HOME -chown $SOLR_USER:$SOLR_USER $SOLR_HOME -su $SOLR_USER -s /bin/sh -c "cp /dataverse/downloads/solr-8.11.1.tgz $SOLR_HOME" -su $SOLR_USER -s /bin/sh -c "cd $SOLR_HOME && tar xfz solr-8.11.1.tgz" -su $SOLR_USER -s /bin/sh -c "cd $SOLR_HOME/solr-8.11.1/server/solr && cp -r configsets/_default . 
&& mv _default collection1" -su $SOLR_USER -s /bin/sh -c "cp /dataverse/conf/solr/8.11.1/schema*.xml $SOLR_HOME/solr-8.11.1/server/solr/collection1/conf/" -su $SOLR_USER -s /bin/sh -c "cp /dataverse/conf/solr/8.11.1/solrconfig.xml $SOLR_HOME/solr-8.11.1/server/solr/collection1/conf/solrconfig.xml" -su $SOLR_USER -s /bin/sh -c "cd $SOLR_HOME/solr-8.11.1 && bin/solr start && bin/solr create_core -c collection1 -d server/solr/collection1/conf/" -cp /dataverse/doc/sphinx-guides/source/_static/installation/files/etc/init.d/solr /etc/init.d/solr -chmod 755 /etc/init.d/solr -/etc/init.d/solr stop -/etc/init.d/solr start -chkconfig solr on diff --git a/scripts/vagrant/setup.sh b/scripts/vagrant/setup.sh deleted file mode 100644 index 001c3ec5dd2..00000000000 --- a/scripts/vagrant/setup.sh +++ /dev/null @@ -1,96 +0,0 @@ -#!/bin/bash -echo "Installing dependencies for Dataverse" - -# wget seems to be missing in box 'bento/centos-8.2' -dnf install -qy wget - -# python3 and psycopg2 for the Dataverse installer -dnf install -qy python3 python3-psycopg2 - -# JQ -echo "Installing jq for the setup scripts" -dnf install -qy epel-release -dnf install -qy jq - -echo "Adding Shibboleth yum repo" -cp /dataverse/conf/vagrant/etc/yum.repos.d/shibboleth.repo /etc/yum.repos.d -# Uncomment this (and other shib stuff below) if you want -# to use Vagrant (and maybe PageKite) to test Shibboleth. 
-#yum install -y shibboleth shibboleth-embedded-ds - -# java configuration et alia -dnf install -qy java-11-openjdk-devel httpd mod_ssl unzip -alternatives --set java /usr/lib/jvm/jre-11-openjdk/bin/java -java -version - -# maven included in centos8 requires 1.8.0 - download binary instead -wget -q https://archive.apache.org/dist/maven/maven-3/3.8.2/binaries/apache-maven-3.8.2-bin.tar.gz -tar xfz apache-maven-3.8.2-bin.tar.gz -mkdir /opt/maven -mv apache-maven-3.8.2/* /opt/maven/ -echo "export JAVA_HOME=/usr/lib/jvm/jre-openjdk" > /etc/profile.d/maven.sh -echo "export M2_HOME=/opt/maven" >> /etc/profile.d/maven.sh -echo "export MAVEN_HOME=/opt/maven" >> /etc/profile.d/maven.sh -echo "export PATH=/opt/maven/bin:${PATH}" >> /etc/profile.d/maven.sh -chmod 0755 /etc/profile.d/maven.sh - -# disable centos8 postgresql module and install postgresql13-server -dnf -qy module disable postgresql -dnf install -qy https://download.postgresql.org/pub/repos/yum/reporpms/EL-8-x86_64/pgdg-redhat-repo-latest.noarch.rpm -dnf install -qy postgresql13-server -/usr/pgsql-13/bin/postgresql-13-setup initdb -/usr/bin/systemctl stop postgresql-13 -cp /dataverse/conf/vagrant/var/lib/pgsql/data/pg_hba.conf /var/lib/pgsql/13/data/pg_hba.conf -/usr/bin/systemctl start postgresql-13 -/usr/bin/systemctl enable postgresql-13 - -PAYARA_USER=dataverse -echo "Ensuring Unix user '$PAYARA_USER' exists" -useradd $PAYARA_USER || : -SOLR_USER=solr -echo "Ensuring Unix user '$SOLR_USER' exists" -useradd $SOLR_USER || : -DOWNLOAD_DIR='/dataverse/downloads' -PAYARA_ZIP="$DOWNLOAD_DIR/payara-6.2023.8.zip" -SOLR_TGZ="$DOWNLOAD_DIR/solr-8.11.1.tgz" -if [ ! -f $PAYARA_ZIP ] || [ ! -f $SOLR_TGZ ]; then - echo "Couldn't find $PAYARA_ZIP or $SOLR_TGZ! Running download script...." - cd $DOWNLOAD_DIR && ./download.sh && cd - echo "Done running download script." -fi -PAYARA_USER_HOME=~dataverse -PAYARA_ROOT=/usr/local/payara6 -if [ ! 
-d $PAYARA_ROOT ]; then - echo "Copying $PAYARA_ZIP to $PAYARA_USER_HOME and unzipping" - su $PAYARA_USER -s /bin/sh -c "cp $PAYARA_ZIP $PAYARA_USER_HOME" - su $PAYARA_USER -s /bin/sh -c "cd $PAYARA_USER_HOME && unzip -q $PAYARA_ZIP" - # default.config defaults to /usr/local/payara6 so let's go with that - rsync -a $PAYARA_USER_HOME/payara6/ $PAYARA_ROOT/ -else - echo "$PAYARA_ROOT already exists" -fi - -#service shibd start -/usr/bin/systemctl stop httpd -cp /dataverse/conf/httpd/conf.d/dataverse.conf /etc/httpd/conf.d/dataverse.conf -mkdir -p /var/www/dataverse/error-documents -cp /dataverse/conf/vagrant/var/www/dataverse/error-documents/503.html /var/www/dataverse/error-documents -/usr/bin/systemctl start httpd -#curl -k --sslv3 https://pdurbin.pagekite.me/Shibboleth.sso/Metadata > /tmp/pdurbin.pagekite.me -#cp -a /etc/shibboleth/shibboleth2.xml /etc/shibboleth/shibboleth2.xml.orig -#cp -a /etc/shibboleth/attribute-map.xml /etc/shibboleth/attribute-map.xml.orig -# need more attributes, such as sn, givenName, mail -#cp /dataverse/conf/vagrant/etc/shibboleth/attribute-map.xml /etc/shibboleth/attribute-map.xml -# FIXME: automate this? -#curl 'https://www.testshib.org/cgi-bin/sp2config.cgi?dist=Others&hostname=pdurbin.pagekite.me' > /etc/shibboleth/shibboleth2.xml -#cp /dataverse/conf/vagrant/etc/shibboleth/shibboleth2.xml /etc/shibboleth/shibboleth2.xml -#service shibd restart -#curl -k --sslv3 https://pdurbin.pagekite.me/Shibboleth.sso/Metadata > /downloads/pdurbin.pagekite.me -#service httpd restart - -echo "#########################################################################################" -echo "# This is a Vagrant test box, so we're disabling firewalld. 
# -echo "# Re-enable it with $ sudo systemctl enable firewalld && sudo systemctl start firewalld #" -echo "#########################################################################################" -systemctl disable firewalld -systemctl stop firewalld diff --git a/src/main/java/edu/harvard/iq/dataverse/AbstractGlobalIdServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/AbstractGlobalIdServiceBean.java index 6827ff33530..f1bfc3e290b 100644 --- a/src/main/java/edu/harvard/iq/dataverse/AbstractGlobalIdServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/AbstractGlobalIdServiceBean.java @@ -222,6 +222,11 @@ public GlobalId parsePersistentId(String fullIdentifierString) { if(!isConfigured()) { return null; } + // Occasionally, the protocol separator character ':' comes in still + // URL-encoded as %3A (usually as a result of the URL having been + // encoded twice): + fullIdentifierString = fullIdentifierString.replace("%3A", ":"); + int index1 = fullIdentifierString.indexOf(':'); if (index1 > 0) { // ':' found with one or more characters before it String protocol = fullIdentifierString.substring(0, index1); diff --git a/src/main/java/edu/harvard/iq/dataverse/NavigationWrapper.java b/src/main/java/edu/harvard/iq/dataverse/NavigationWrapper.java index d2c522b5c89..832d7ec19ef 100644 --- a/src/main/java/edu/harvard/iq/dataverse/NavigationWrapper.java +++ b/src/main/java/edu/harvard/iq/dataverse/NavigationWrapper.java @@ -96,7 +96,8 @@ private String sendError(int errorCode) { try { context.getExternalContext().responseSendError(errorCode,null); } catch (IOException ex) { - Logger.getLogger(PermissionsWrapper.class.getName()).log(Level.SEVERE, null, ex); + //Logger.getLogger(PermissionsWrapper.class.getName()).log(Level.SEVERE, null, ex); + Logger.getLogger(NavigationWrapper.class.getName()).fine("Caught exception in sendError(): "+ex.getMessage()); } context.responseComplete(); return ""; diff --git 
a/src/main/java/edu/harvard/iq/dataverse/harvest/server/OAISetServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/harvest/server/OAISetServiceBean.java index ee4475e12c8..2bd666401c7 100644 --- a/src/main/java/edu/harvard/iq/dataverse/harvest/server/OAISetServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/harvest/server/OAISetServiceBean.java @@ -27,7 +27,7 @@ import jakarta.persistence.PersistenceContext; import org.apache.solr.client.solrj.SolrQuery; import org.apache.solr.client.solrj.SolrServerException; -import org.apache.solr.client.solrj.impl.HttpSolrClient.RemoteSolrException; +import org.apache.solr.client.solrj.impl.BaseHttpSolrClient.RemoteSolrException; import org.apache.solr.client.solrj.response.QueryResponse; import org.apache.solr.common.SolrDocument; import org.apache.solr.common.SolrDocumentList; diff --git a/src/main/java/edu/harvard/iq/dataverse/search/SearchServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/search/SearchServiceBean.java index 5df8c58ff35..44976d232c2 100644 --- a/src/main/java/edu/harvard/iq/dataverse/search/SearchServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/search/SearchServiceBean.java @@ -44,7 +44,7 @@ import org.apache.solr.client.solrj.SolrQuery; import org.apache.solr.client.solrj.SolrQuery.SortClause; import org.apache.solr.client.solrj.SolrServerException; -import org.apache.solr.client.solrj.impl.HttpSolrClient.RemoteSolrException; +import org.apache.solr.client.solrj.impl.BaseHttpSolrClient.RemoteSolrException; import org.apache.solr.client.solrj.response.FacetField; import org.apache.solr.client.solrj.response.QueryResponse; import org.apache.solr.client.solrj.response.RangeFacet; diff --git a/src/main/webapp/404static.xhtml b/src/main/webapp/404static.xhtml new file mode 100644 index 00000000000..69ff17ebc0f --- /dev/null +++ b/src/main/webapp/404static.xhtml @@ -0,0 +1,109 @@ + + + + + #{bundle['error.404.page.title']} + + + + + + + + + + + + + + + + + + + +
+ + +
+
+ +
+ + + diff --git a/src/main/webapp/WEB-INF/web.xml b/src/main/webapp/WEB-INF/web.xml index f16da2c8e5b..427615f2f0b 100644 --- a/src/main/webapp/WEB-INF/web.xml +++ b/src/main/webapp/WEB-INF/web.xml @@ -7,7 +7,7 @@ 404 - /404.xhtml + /404static.xhtml 500 diff --git a/src/main/webapp/file-download-button-fragment.xhtml b/src/main/webapp/file-download-button-fragment.xhtml index 4021ad7bc65..f28efc47705 100644 --- a/src/main/webapp/file-download-button-fragment.xhtml +++ b/src/main/webapp/file-download-button-fragment.xhtml @@ -74,7 +74,7 @@ styleClass="btn-download" process="@this" disabled="#{(fileMetadata.dataFile.ingestInProgress or lockedFromDownload) ? 'disabled' : ''}" - action="#{guestbookResponseService.modifyDatafileAndFormat(guestbookResponse, fileMetadata, 'GlobusTransfer')}" + actionListener="#{guestbookResponseService.modifyDatafileAndFormat(guestbookResponse, fileMetadata, 'GlobusTransfer')}" update="@widgetVar(downloadPopup)" oncomplete="PF('downloadPopup').show();handleResizeDialog('downloadPopup');"> @@ -101,7 +101,7 @@ styleClass="btn-download" process="@this" disabled="#{(fileMetadata.dataFile.ingestInProgress or lockedFromDownload) ? 'disabled' : ''}" - action="#{guestbookResponseService.modifyDatafileAndFormat(guestbookResponse, fileMetadata, 'package')}" + actionListener="#{guestbookResponseService.modifyDatafileAndFormat(guestbookResponse, fileMetadata, 'package')}" update="@widgetVar(downloadPopup)" oncomplete="PF('downloadPopup').show();handleResizeDialog('downloadPopup');"> @@ -123,7 +123,7 @@ styleClass="btn-download" process="@this" disabled="#{(fileMetadata.dataFile.ingestInProgress or lockedFromDownload) ? 
'disabled' : ''}" - action="#{guestbookResponseService.modifyDatafile(guestbookResponse, fileMetadata)}" + actionListener="#{guestbookResponseService.modifyDatafile(guestbookResponse, fileMetadata)}" update="@widgetVar(downloadPopup)" oncomplete="PF('downloadPopup').show();handleResizeDialog('downloadPopup');"> @@ -141,7 +141,7 @@ #{bundle['file.downloadBtn.format.all']} @@ -161,7 +161,7 @@ @@ -178,7 +178,7 @@ #{bundle['file.downloadBtn.format.tab']} @@ -197,7 +197,7 @@ @@ -224,7 +224,7 @@ #{bundle['file.downloadBtn.format.var']} @@ -311,7 +311,7 @@ + +

@@ -51,6 +53,7 @@

+
+
\ No newline at end of file diff --git a/src/main/webapp/file-edit-button-fragment.xhtml b/src/main/webapp/file-edit-button-fragment.xhtml index a31438f8e06..4dac1613266 100644 --- a/src/main/webapp/file-edit-button-fragment.xhtml +++ b/src/main/webapp/file-edit-button-fragment.xhtml @@ -95,8 +95,7 @@ + oncomplete="PF('fileEmbargoPopup').show();"> @@ -135,4 +134,4 @@
- \ No newline at end of file + diff --git a/src/test/java/edu/harvard/iq/dataverse/api/AdminIT.java b/src/test/java/edu/harvard/iq/dataverse/api/AdminIT.java index 2aee3bd38b2..a5a4924ad77 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/AdminIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/AdminIT.java @@ -734,6 +734,13 @@ public void testMigrateHDLToDOI() { .statusCode(OK.getStatusCode()); } + /** + * Disabled because once there are new fields in the database that Solr + * doesn't know about, dataset creation could be prevented, or at least + * subsequent search operations could fail because the dataset can't be + * indexed. + */ + @Disabled @Test public void testLoadMetadataBlock_NoErrorPath() { Response createUser = UtilIT.createRandomUser(); @@ -778,6 +785,13 @@ public void testLoadMetadataBlock_NoErrorPath() { assertEquals(244, (int) statistics.get("Controlled Vocabulary")); } + /** + * Disabled because once there are new fields in the database that Solr + * doesn't know about, dataset creation could be prevented, or at least + * subsequent search operations could fail because the dataset can't be + * indexed. 
+ */ + @Disabled @Test public void testLoadMetadataBlock_ErrorHandling() { Response createUser = UtilIT.createRandomUser(); diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java b/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java index 1a3f4125d36..09052f9e4ea 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java @@ -372,7 +372,7 @@ public void testMoveDataverse() { while (checkIndex) { try { try { - Thread.sleep(2000); + Thread.sleep(4000); } catch (InterruptedException ex) { } Response search = UtilIT.search("id:dataverse_" + dataverseId + "&subtree=" + dataverseAlias2, apiToken); diff --git a/src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java b/src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java index 4c7bbfc5793..d0f20a8642b 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java @@ -34,6 +34,7 @@ import static org.hamcrest.CoreMatchers.startsWith; import static org.hamcrest.CoreMatchers.nullValue; import org.hamcrest.Matchers; +import org.junit.jupiter.api.AfterAll; import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertNotNull; @@ -47,6 +48,15 @@ public class FilesIT { @BeforeAll public static void setUpClass() { RestAssured.baseURI = UtilIT.getRestAssuredBaseUri(); + + Response removePublicInstall = UtilIT.deleteSetting(SettingsServiceBean.Key.PublicInstall); + removePublicInstall.then().assertThat().statusCode(200); + + } + + @AfterAll + public static void tearDownClass() { + UtilIT.deleteSetting(SettingsServiceBean.Key.PublicInstall); } /** @@ -1095,6 +1105,9 @@ public void testAccessFacet() { msg("Add initial file"); String pathToFile = "src/main/webapp/resources/images/dataverseproject.png"; Response addResponse = UtilIT.uploadFileViaNative(datasetId.toString(), pathToFile, apiToken); + + // Wait a 
little while for the index to pick up the file, otherwise timing issue with searching for it. + UtilIT.sleepForReindex(datasetId.toString(), apiToken, 4); String successMsgAdd = BundleUtil.getStringFromBundle("file.addreplace.success.add"); @@ -1105,9 +1118,9 @@ public void testAccessFacet() { long fileId = JsonPath.from(addResponse.body().asString()).getLong("data.files[0].dataFile.id"); - Response searchShouldFindNothingBecauseUnpublished = UtilIT.search("id:datafile_" + fileId + "_draft", apiToken); - searchShouldFindNothingBecauseUnpublished.prettyPrint(); - searchShouldFindNothingBecauseUnpublished.then().assertThat() + Response searchShouldFindBecauseAuthorApiTokenSupplied = UtilIT.search("id:datafile_" + fileId + "_draft", apiToken); + searchShouldFindBecauseAuthorApiTokenSupplied.prettyPrint(); + searchShouldFindBecauseAuthorApiTokenSupplied.then().assertThat() .body("data.total_count", equalTo(1)) .statusCode(OK.getStatusCode()); diff --git a/src/test/java/edu/harvard/iq/dataverse/api/FitsIT.java b/src/test/java/edu/harvard/iq/dataverse/api/FitsIT.java index 763b61000d8..e788efc9c87 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/FitsIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/FitsIT.java @@ -60,6 +60,7 @@ public void testAstroFieldsFromFits() throws IOException { getJson.prettyPrint(); getJson.then().assertThat() .statusCode(OK.getStatusCode()) + .body("data.latestVersion.files[0].description", equalTo("FITS file, 2 HDUs total:\nThe primary HDU; 1 Table HDU(s) 1 Image HDU(s); \nThe following recognized metadata keys have been found in the FITS file:\nCRVAL2; NAXIS; INSTRUME; NAXIS1; NAXIS0; EXPTIME; CD1_1; CRVAL1; TARGNAME; DATE-OBS; \n")) .body("data.latestVersion.metadataBlocks.astrophysics.fields[0].value[0]", equalTo("Image")); // a bit more precise than the check for "Image" above (but annoyingly fiddly) diff --git a/src/test/java/edu/harvard/iq/dataverse/api/MoveIT.java b/src/test/java/edu/harvard/iq/dataverse/api/MoveIT.java 
index d448a46a066..f7135ce7f3b 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/MoveIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/MoveIT.java @@ -300,7 +300,7 @@ public void testMoveLinkedDataset() { .statusCode(OK.getStatusCode()) .body("feed.entry[0].id", CoreMatchers.endsWith(datasetPid)); - UtilIT.sleepForReindex(datasetPid, superuserApiToken, 10); + UtilIT.sleepForReindex(datasetPid, superuserApiToken, 20); Response getLinksAfter = UtilIT.getDatasetLinks(datasetPid, superuserApiToken); getLinksAfter.prettyPrint(); getLinksAfter.then().assertThat() diff --git a/src/test/java/edu/harvard/iq/dataverse/api/SearchIT.java b/src/test/java/edu/harvard/iq/dataverse/api/SearchIT.java index 807d1cac907..125753296a2 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/SearchIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/SearchIT.java @@ -814,7 +814,7 @@ public void testNestedSubtree() { .statusCode(OK.getStatusCode()); try { - Thread.sleep(2000); + Thread.sleep(4000); } catch (InterruptedException ex) { /** * This sleep is here because dataverseAlias2 is showing with @@ -966,6 +966,9 @@ public void testSubtreePermissions() { Response datasetAsJson2 = UtilIT.nativeGet(datasetId2, apiToken); datasetAsJson2.then().assertThat() .statusCode(OK.getStatusCode()); + + // Wait a little while for the index to pick up the datasets, otherwise timing issue with searching for it. 
+ UtilIT.sleepForReindex(datasetId2.toString(), apiToken, 2); String identifier = JsonPath.from(datasetAsJson.getBody().asString()).getString("data.identifier"); String identifier2 = JsonPath.from(datasetAsJson2.getBody().asString()).getString("data.identifier"); diff --git a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java index 0e3998f48a6..e47971f9b92 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java @@ -563,7 +563,14 @@ static Response updateDatasetPIDMetadata(String persistentId, String apiToken) .post("/api/datasets/:persistentId/modifyRegistrationMetadata/?persistentId=" + persistentId); return response; } - + + /** + * Deprecated because once there are new fields in the database that Solr + * doesn't know about, dataset creation could be prevented, or at least + * subsequent search operations could fail because the dataset can't be + * indexed. + */ + @Deprecated static Response loadMetadataBlock(String apiToken, byte[] body) { return given() .header(API_TOKEN_HTTP_HEADER, apiToken) diff --git a/tests/shell/spec/update_fields_spec.sh b/tests/shell/spec/update_fields_spec.sh index e77121672dd..48054a121b7 100644 --- a/tests/shell/spec/update_fields_spec.sh +++ b/tests/shell/spec/update_fields_spec.sh @@ -1,16 +1,16 @@ #shellcheck shell=sh update_fields() { - ../../conf/solr/8.11.1/update-fields.sh "$@" + ../../conf/solr/9.3.0/update-fields.sh "$@" } Describe "Update fields command" Describe "can operate on upstream data" - copyUpstreamSchema() { cp ../../conf/solr/8.11.1/schema.xml data/solr/upstream-schema.xml; } + copyUpstreamSchema() { cp ../../conf/solr/9.3.0/schema.xml data/solr/upstream-schema.xml; } AfterAll 'copyUpstreamSchema' - Path schema-xml="../../conf/solr/8.11.1/schema.xml" + Path schema-xml="../../conf/solr/9.3.0/schema.xml" It "needs upstream schema.xml" The path schema-xml should be exist End