diff --git a/.env b/.env
index 3467f8df73b..e3ececc2e54 100644
--- a/.env
+++ b/.env
@@ -1,4 +1,4 @@
 APP_IMAGE=gdcc/dataverse:unstable
 POSTGRES_VERSION=13
 DATAVERSE_DB_USER=dataverse
-SOLR_VERSION=8.11.1
+SOLR_VERSION=9.3.0
diff --git a/.github/workflows/container_app_pr.yml b/.github/workflows/container_app_pr.yml
index 9e514690a13..c86d284e74b 100644
--- a/.github/workflows/container_app_pr.yml
+++ b/.github/workflows/container_app_pr.yml
@@ -25,7 +25,7 @@ jobs:
                   ref: 'refs/pull/${{ github.event.client_payload.pull_request.number }}/merge'
             - uses: actions/setup-java@v3
               with:
-                  java-version: "11"
+                  java-version: "17"
                   distribution: 'adopt'
             - uses: actions/cache@v3
               with:
diff --git a/.github/workflows/container_app_push.yml b/.github/workflows/container_app_push.yml
index c60691b1c85..b3e247e376c 100644
--- a/.github/workflows/container_app_push.yml
+++ b/.github/workflows/container_app_push.yml
@@ -36,10 +36,10 @@ jobs:
             - name: Checkout repository
               uses: actions/checkout@v3
 
-            - name: Set up JDK 11
+            - name: Set up JDK
               uses: actions/setup-java@v3
               with:
-                  java-version: "11"
+                  java-version: "17"
                   distribution: temurin
                   cache: maven
 
@@ -99,17 +99,21 @@ jobs:
         name: "Package & Publish"
         runs-on: ubuntu-latest
         # Only run this job if we have access to secrets. This is true for events like push/schedule which run in
-        # context of main repo, but for PRs only true if coming from the main repo! Forks have no secret access.
-        if: needs.check-secrets.outputs.available == 'true'
+        # context of the main repo, but for PRs only true if coming from the main repo! Forks have no secret access.
+        #
+        # Note: The team's decision was to not auto-deploy an image on any git push where no PR exists (yet).
+        #       Accordingly, only run for push events on branches develop and master.
+        if: needs.check-secrets.outputs.available == 'true' &&
+            ( github.event_name != 'push' || contains(fromJSON('["develop", "master"]'), github.ref_name))
         steps:
             - uses: actions/checkout@v3
             - uses: actions/setup-java@v3
               with:
-                  java-version: "11"
+                  java-version: "17"
                   distribution: temurin
 
             # Depending on context, we push to different targets. Login accordingly.
-            - if: ${{ github.event_name != 'pull_request' }}
+            - if: github.event_name != 'pull_request'
               name: Log in to Docker Hub registry
               uses: docker/login-action@v2
               with:
diff --git a/.github/workflows/container_base_push.yml b/.github/workflows/container_base_push.yml
index 5c62fb0c811..b938851f816 100644
--- a/.github/workflows/container_base_push.yml
+++ b/.github/workflows/container_base_push.yml
@@ -34,7 +34,7 @@ jobs:
             packages: read
         strategy:
             matrix:
-                jdk: [ '11' ]
+                jdk: [ '17' ]
         # Only run in upstream repo - avoid unnecessary runs in forks
         if: ${{ github.repository_owner == 'IQSS' }}
 
diff --git a/.github/workflows/cypress_ui.yml.future b/.github/workflows/cypress_ui.yml.future
index b38ae2f9558..0823233fdeb 100644
--- a/.github/workflows/cypress_ui.yml.future
+++ b/.github/workflows/cypress_ui.yml.future
@@ -2,6 +2,7 @@
 #
 #   THIS IS AN OLD TRAVIS-CI.ORG JOB FILE
 #   To be used with Github Actions, it would be necessary to refactor it.
+#   In addition, it needs to be rewritten to use our modern containers.
 #   Keeping it as the future example it has been before.
 #   See also #5846
 #
@@ -30,8 +31,6 @@ jobs:
         directories:
           # we also need to cache folder with Cypress binary
           - ~/.cache
-          # we want to cache the Glassfish and Solr dependencies as well
-          - conf/docker-aio/dv/deps
       before_install:
         - cd tests
       install:
diff --git a/.github/workflows/deploy_beta_testing.yml b/.github/workflows/deploy_beta_testing.yml
index 3e67bfe426e..2443ef8b2e0 100644
--- a/.github/workflows/deploy_beta_testing.yml
+++ b/.github/workflows/deploy_beta_testing.yml
@@ -8,7 +8,6 @@ on:
 jobs:
   build:
     runs-on: ubuntu-latest
-    environment: beta-testing
 
     steps:
       - uses: actions/checkout@v3
@@ -16,7 +15,11 @@ jobs:
       - uses: actions/setup-java@v3
         with:
           distribution: 'zulu'
-          java-version: '11'
+          java-version: '17'
+
+      - name: Enable API Session Auth feature flag
+        working-directory: src/main/resources/META-INF
+        run: echo -e "dataverse.feature.api-session-auth=true" >> microprofile-config.properties
 
       - name: Build application war
         run: mvn package
@@ -34,7 +37,6 @@ jobs:
   deploy-to-payara:
     needs: build
     runs-on: ubuntu-latest
-    environment: beta-testing
 
     steps:
       - uses: actions/checkout@v3
@@ -51,11 +53,11 @@ jobs:
       - name: Copy war file to remote instance
         uses: appleboy/scp-action@master
         with:
-          host: ${{ secrets.PAYARA_INSTANCE_HOST }}
-          username: ${{ secrets.PAYARA_INSTANCE_USERNAME }}
-          key: ${{ secrets.PAYARA_INSTANCE_SSH_PRIVATE_KEY }}
+          host: ${{ secrets.BETA_PAYARA_INSTANCE_HOST }}
+          username: ${{ secrets.BETA_PAYARA_INSTANCE_USERNAME }}
+          key: ${{ secrets.BETA_PAYARA_INSTANCE_SSH_PRIVATE_KEY }}
           source: './${{ env.war_file }}'
-          target: '/home/${{ secrets.PAYARA_INSTANCE_USERNAME }}'
+          target: '/home/${{ secrets.BETA_PAYARA_INSTANCE_USERNAME }}'
           overwrite: true
 
       - name: Execute payara war deployment remotely
@@ -63,17 +65,17 @@ jobs:
         env:
           INPUT_WAR_FILE: ${{ env.war_file }}
         with:
-          host: ${{ secrets.PAYARA_INSTANCE_HOST }}
-          username: ${{ secrets.PAYARA_INSTANCE_USERNAME }}
-          key: ${{ secrets.PAYARA_INSTANCE_SSH_PRIVATE_KEY }}
+          host: ${{ secrets.BETA_PAYARA_INSTANCE_HOST }}
+          username: ${{ secrets.BETA_PAYARA_INSTANCE_USERNAME }}
+          key: ${{ secrets.BETA_PAYARA_INSTANCE_SSH_PRIVATE_KEY }}
           envs: INPUT_WAR_FILE
           script: |
             APPLICATION_NAME=dataverse-backend
-            ASADMIN='/usr/local/payara5/bin/asadmin --user admin'
+            ASADMIN='/usr/local/payara6/bin/asadmin --user admin'
             $ASADMIN undeploy $APPLICATION_NAME
             $ASADMIN stop-domain
-            rm -rf /usr/local/payara5/glassfish/domains/domain1/generated
-            rm -rf /usr/local/payara5/glassfish/domains/domain1/osgi-cache
+            rm -rf /usr/local/payara6/glassfish/domains/domain1/generated
+            rm -rf /usr/local/payara6/glassfish/domains/domain1/osgi-cache
             $ASADMIN start-domain
             $ASADMIN deploy --name $APPLICATION_NAME $INPUT_WAR_FILE
             $ASADMIN stop-domain
diff --git a/.github/workflows/maven_unit_test.yml b/.github/workflows/maven_unit_test.yml
index 45beabf3193..efa3fa4a471 100644
--- a/.github/workflows/maven_unit_test.yml
+++ b/.github/workflows/maven_unit_test.yml
@@ -22,18 +22,9 @@ jobs:
         strategy:
             fail-fast: false
             matrix:
-                jdk: [ '11' ]
+                jdk: [ '17' ]
                 experimental: [false]
                 status:  ["Stable"]
-                #
-                # JDK 17 builds disabled due to non-essential fails marking CI jobs as completely failed within
-                # Github Projects, PR lists etc. This was consensus on Slack #dv-tech. See issue #8094
-                # (This is a limitation of how Github is currently handling these things.)
-                #
-                #include:
-                #    - jdk: '17'
-                #      experimental: true
-                #      status: "Experimental"
         continue-on-error: ${{ matrix.experimental }}
         runs-on: ubuntu-latest
         steps:
@@ -68,6 +59,14 @@ jobs:
 
           # We don't want to cache the WAR file, so delete it
           - run: rm -rf ~/.m2/repository/edu/harvard/iq/dataverse
+
+          # Upload the built war file. For download, it will be wrapped in a ZIP by GitHub.
+          # See also https://github.com/actions/upload-artifact#zipped-artifact-downloads
+          - uses: actions/upload-artifact@v3
+            with:
+                name: dataverse-java${{ matrix.jdk }}.war
+                path: target/dataverse*.war
+                retention-days: 7
     push-app-img:
         name: Publish App Image
         permissions:
diff --git a/.github/workflows/shellcheck.yml b/.github/workflows/shellcheck.yml
index 94ba041e135..56f7d648dc4 100644
--- a/.github/workflows/shellcheck.yml
+++ b/.github/workflows/shellcheck.yml
@@ -33,7 +33,6 @@ jobs:
                   # Exclude old scripts
                   exclude: |
                       */.git/*
-                      conf/docker-aio/*
                       doc/*
                       downloads/*
                       scripts/database/*
@@ -43,5 +42,4 @@ jobs:
                       scripts/issues/*
                       scripts/r/*
                       scripts/tests/*
-                      scripts/vagrant/*
                       tests/*
diff --git a/.github/workflows/shellspec.yml b/.github/workflows/shellspec.yml
index 5c251cfc897..227a74fa00f 100644
--- a/.github/workflows/shellspec.yml
+++ b/.github/workflows/shellspec.yml
@@ -60,7 +60,7 @@ jobs:
                   shellspec
     shellspec-macos:
         name: "MacOS"
-        runs-on: macos-10.15
+        runs-on: macos-latest
         steps:
             - name: Install shellspec
               run: curl -fsSL https://git.io/shellspec | sh -s 0.28.1 --yes
diff --git a/.github/workflows/spi_release.yml b/.github/workflows/spi_release.yml
index 1fbf05ce693..8ad74b3e4bb 100644
--- a/.github/workflows/spi_release.yml
+++ b/.github/workflows/spi_release.yml
@@ -2,12 +2,12 @@ name: Dataverse SPI
 
 on:
     push:
-        branch:
+        branches:
             - "develop"
         paths:
             - "modules/dataverse-spi/**"
     pull_request:
-        branch:
+        branches:
             - "develop"
         paths:
             - "modules/dataverse-spi/**"
@@ -40,7 +40,7 @@ jobs:
             - uses: actions/checkout@v3
             - uses: actions/setup-java@v3
               with:
-                  java-version: '11'
+                  java-version: '17'
                   distribution: 'adopt'
                   server-id: ossrh
                   server-username: MAVEN_USERNAME
@@ -66,7 +66,7 @@ jobs:
             -   uses: actions/checkout@v3
             -   uses: actions/setup-java@v3
                 with:
-                    java-version: '11'
+                    java-version: '17'
                     distribution: 'adopt'
             -   uses: actions/cache@v2
                 with:
@@ -78,7 +78,7 @@ jobs:
             -   name: Set up Maven Central Repository
                 uses: actions/setup-java@v3
                 with:
-                    java-version: '11'
+                    java-version: '17'
                     distribution: 'adopt'
                     server-id: ossrh
                     server-username: MAVEN_USERNAME
@@ -91,4 +91,4 @@ jobs:
                 env:
                     MAVEN_USERNAME: ${{ secrets.DATAVERSEBOT_SONATYPE_USERNAME }}
                     MAVEN_PASSWORD: ${{ secrets.DATAVERSEBOT_SONATYPE_TOKEN }}
-                    MAVEN_GPG_PASSPHRASE: ${{ secrets.DATAVERSEBOT_GPG_PASSWORD }}
\ No newline at end of file
+                    MAVEN_GPG_PASSPHRASE: ${{ secrets.DATAVERSEBOT_GPG_PASSWORD }}
diff --git a/.gitignore b/.gitignore
index d38538fc364..7f0d3a2b466 100644
--- a/.gitignore
+++ b/.gitignore
@@ -18,7 +18,6 @@ GRTAGS
 .Trashes
 ehthumbs.db
 Thumbs.db
-.vagrant
 *.pyc
 *.swp
 scripts/api/py_api_wrapper/demo-data/*
@@ -39,17 +38,6 @@ scripts/api/setup-all.*.log
 # ctags generated tag file
 tags
 
-# dependencies I'm not sure we're allowed to redistribute / have in version control
-conf/docker-aio/dv/deps/
-
-# no need to check aoi installer zip into vc
-conf/docker-aio/dv/install/dvinstall.zip
-# or copy of test data
-conf/docker-aio/testdata/
-
-# docker-aio creates maven/ which reports 86 new files. ignore this wd.
-maven/
-
 scripts/installer/default.config
 *.pem
 
@@ -71,8 +59,5 @@ scripts/search/data/binary/trees.png.thumb140
 src/main/webapp/resources/images/cc0.png.thumb140
 src/main/webapp/resources/images/dataverseproject.png.thumb140
 
-# apache-maven is downloaded by docker-aio
-apache-maven*
-
 # Docker development volumes
 /docker-dev-volumes
diff --git a/Vagrantfile b/Vagrantfile
deleted file mode 100644
index 8293fbaf5fc..00000000000
--- a/Vagrantfile
+++ /dev/null
@@ -1,27 +0,0 @@
-# -*- mode: ruby -*-
-# vi: set ft=ruby :
-
-VAGRANTFILE_API_VERSION = "2"
-
-Vagrant.configure(VAGRANTFILE_API_VERSION) do |config|
-  config.vm.box = "bento/rockylinux-8.4"
-
-  config.vm.provider "virtualbox" do |vbox|
-    vbox.cpus = 4
-    vbox.memory = 4096
-  end
-
-  config.vm.provision "shell", path: "scripts/vagrant/setup.sh"
-  config.vm.provision "shell", path: "scripts/vagrant/setup-solr.sh"
-  config.vm.provision "shell", path: "scripts/vagrant/install-dataverse.sh"
-
-  config.vm.network "private_network", type: "dhcp"
-  config.vm.network "forwarded_port", guest: 80, host: 8888
-  config.vm.network "forwarded_port", guest: 443, host: 9999
-  config.vm.network "forwarded_port", guest: 8983, host: 8993
-  config.vm.network "forwarded_port", guest: 8080, host: 8088
-  config.vm.network "forwarded_port", guest: 8181, host: 8188
-
-  config.vm.synced_folder ".", "/dataverse"
-
-end
diff --git a/conf/docker-aio/0prep_deps.sh b/conf/docker-aio/0prep_deps.sh
deleted file mode 100755
index 13a91705303..00000000000
--- a/conf/docker-aio/0prep_deps.sh
+++ /dev/null
@@ -1,19 +0,0 @@
-#!/bin/sh
-if [ ! -d dv/deps ]; then
-	mkdir -p dv/deps
-fi
-wdir=`pwd`
-
-if [ ! -e dv/deps/payara-5.2022.3.zip ]; then
-	echo "payara dependency prep"
-	wget https://s3-eu-west-1.amazonaws.com/payara.fish/Payara+Downloads/5.2022.3/payara-5.2022.3.zip  -O dv/deps/payara-5.2022.3.zip
-fi
-
-if [ ! -e dv/deps/solr-8.11.1dv.tgz ]; then
-	echo "solr dependency prep"
-	# schema changes *should* be the only ones...
-	cd dv/deps/	
-	wget https://archive.apache.org/dist/lucene/solr/8.11.1/solr-8.11.1.tgz -O solr-8.11.1dv.tgz
-	cd ../../
-fi
-
diff --git a/conf/docker-aio/1prep.sh b/conf/docker-aio/1prep.sh
deleted file mode 100755
index 508d41d93ff..00000000000
--- a/conf/docker-aio/1prep.sh
+++ /dev/null
@@ -1,39 +0,0 @@
-#!/bin/bash
-
-# move things necessary for integration tests into build context.
-# this was based off the phoenix deployment; and is likely uglier and bulkier than necessary in a perfect world
-
-mkdir -p testdata/doc/sphinx-guides/source/_static/util/
-cp ../solr/8.11.1/schema*.xml testdata/
-cp ../solr/8.11.1/solrconfig.xml testdata/
-cp ../jhove/jhove.conf testdata/
-cp ../jhove/jhoveConfig.xsd testdata/
-cd ../../
-cp -r scripts conf/docker-aio/testdata/
-cp doc/sphinx-guides/source/_static/util/createsequence.sql conf/docker-aio/testdata/doc/sphinx-guides/source/_static/util/
-
-wget -q https://downloads.apache.org/maven/maven-3/3.8.5/binaries/apache-maven-3.8.5-bin.tar.gz
-tar xfz apache-maven-3.8.5-bin.tar.gz
-mkdir maven
-mv apache-maven-3.8.5/* maven/
-echo "export JAVA_HOME=/usr/lib/jvm/jre-openjdk" > maven/maven.sh
-echo "export M2_HOME=../maven" >> maven/maven.sh
-echo "export MAVEN_HOME=../maven" >> maven/maven.sh
-echo "export PATH=../maven/bin:${PATH}" >> maven/maven.sh
-chmod 0755 maven/maven.sh
-
-# not using dvinstall.zip for setupIT.bash; but still used in install.bash for normal ops
-source maven/maven.sh && mvn clean
-./scripts/installer/custom-build-number
-source maven/maven.sh && mvn package
-cd scripts/installer
-make clean
-make
-mkdir -p ../../conf/docker-aio/dv/install
-cp dvinstall.zip ../../conf/docker-aio/dv/install/
-
-# ITs sometimes need files server-side
-# yes, these copies could be avoided by moving the build root here. but the build 
-#  context is already big enough that it seems worth avoiding.
-cd ../../
-cp src/test/java/edu/harvard/iq/dataverse/makedatacount/sushi_sample_logs.json conf/docker-aio/testdata/
diff --git a/conf/docker-aio/c8.dockerfile b/conf/docker-aio/c8.dockerfile
deleted file mode 100644
index 0002464cbf2..00000000000
--- a/conf/docker-aio/c8.dockerfile
+++ /dev/null
@@ -1,87 +0,0 @@
-FROM rockylinux/rockylinux:latest
-# OS dependencies
-# IQSS now recommends Postgres 13.
-RUN dnf -qy module disable postgresql
-RUN yum install -y https://download.postgresql.org/pub/repos/yum/reporpms/EL-8-x86_64/pgdg-redhat-repo-latest.noarch.rpm
-
-RUN echo "fastestmirror=true" >> /etc/dnf/dnf.conf
-RUN yum install -y java-11-openjdk-devel postgresql13-server sudo epel-release unzip curl httpd python2 diffutils
-RUN yum install -y jq lsof awscli
-
-# for older search scripts
-RUN ln -s /usr/bin/python2 /usr/bin/python
-
-# copy and unpack dependencies (solr, payara)
-COPY dv /tmp/dv
-COPY testdata/schema*.xml /tmp/dv/
-COPY testdata/solrconfig.xml /tmp/dv
-
-# ITs need files
-COPY testdata/sushi_sample_logs.json /tmp/
-
-# IPv6 and localhost appears to be related to some of the intermittant connection issues
-COPY disableipv6.conf /etc/sysctl.d/
-RUN rm /etc/httpd/conf/*
-COPY httpd.conf /etc/httpd/conf 
-RUN cd /opt ; tar zxf /tmp/dv/deps/solr-8.11.1dv.tgz
-RUN cd /opt ; unzip /tmp/dv/deps/payara-5.2022.3.zip ; ln -s /opt/payara5 /opt/glassfish4
-
-# this copy of domain.xml is the result of running `asadmin set server.monitoring-service.module-monitoring-levels.jvm=LOW` on a default glassfish installation (aka - enable the glassfish REST monitir endpoint for the jvm`
-# this dies under Java 11, do we keep it?
-#COPY domain-restmonitor.xml /opt/payara5/glassfish/domains/domain1/config/domain.xml
-
-RUN sudo -u postgres /usr/pgsql-13/bin/initdb -D /var/lib/pgsql/13/data -E 'UTF-8'
-
-# copy configuration related files
-RUN cp /tmp/dv/pg_hba.conf /var/lib/pgsql/13/data/
-RUN cp -r /opt/solr-8.11.1/server/solr/configsets/_default /opt/solr-8.11.1/server/solr/collection1
-RUN cp /tmp/dv/schema*.xml /opt/solr-8.11.1/server/solr/collection1/conf/
-RUN cp /tmp/dv/solrconfig.xml /opt/solr-8.11.1/server/solr/collection1/conf/solrconfig.xml
-
-# skipping payara user and solr user (run both as root)
-
-#solr port
-EXPOSE 8983
-
-# postgres port
-EXPOSE 5432
-
-# payara port
-EXPOSE 8080
-
-# apache port, http
-EXPOSE 80
-
-# debugger ports (jmx,jdb)
-EXPOSE 8686
-EXPOSE 9009
-
-RUN mkdir /opt/dv
-
-# keeping the symlink on the off chance that something else is still assuming /usr/local/glassfish4
-RUN ln -s /opt/payara5 /usr/local/glassfish4
-COPY dv/install/ /opt/dv/
-COPY install.bash /opt/dv/
-COPY entrypoint.bash /opt/dv/
-COPY testdata /opt/dv/testdata
-COPY testscripts/* /opt/dv/testdata/
-COPY setupIT.bash /opt/dv
-WORKDIR /opt/dv
-
-# need to take DOI provider info from build args as of ec377d2a4e27424db8815c55ce544deee48fc5e0
-# Default to EZID; use built-args to switch to DataCite (or potentially handles)
-#ARG DoiProvider=EZID
-ARG DoiProvider=FAKE
-ARG doi_baseurl=https://ezid.cdlib.org
-ARG doi_username=apitest
-ARG doi_password=apitest
-ENV DoiProvider=${DoiProvider}
-ENV doi_baseurl=${doi_baseurl}
-ENV doi_username=${doi_username}
-ENV doi_password=${doi_password}
-COPY configure_doi.bash /opt/dv
-
-# healthcheck for payara only (assumes modified domain.xml);
-#  does not check dataverse application status.
-HEALTHCHECK CMD curl --fail http://localhost:4848/monitoring/domain/server.json || exit 1
-CMD ["/opt/dv/entrypoint.bash"]
diff --git a/conf/docker-aio/configure_doi.bash b/conf/docker-aio/configure_doi.bash
deleted file mode 100755
index f0f0bc6d0d4..00000000000
--- a/conf/docker-aio/configure_doi.bash
+++ /dev/null
@@ -1,24 +0,0 @@
-#!/usr/bin/env bash
-
-cd /opt/payara5
-
-# if appropriate; reconfigure PID provider on the basis of environmental variables.
-if [ ! -z "${DoiProvider}" ]; then
-        curl -X PUT -d ${DoiProvider} http://localhost:8080/api/admin/settings/:DoiProvider
-fi
-if [ ! -z "${doi_username}" ]; then
-        bin/asadmin create-jvm-options "-Ddoi.username=${doi_username}"
-fi
-if [ ! -z "${doi_password}" ]; then
-        bin/asadmin create-jvm-options "-Ddoi.password=${doi_password}"
-fi
-if [ ! -z "${doi_baseurl}" ]; then
-        bin/asadmin delete-jvm-options "-Ddoi.baseurlstring=https\://mds.test.datacite.org"
-        doi_baseurl_esc=`echo ${doi_baseurl} | sed -e 's/:/\\\:/'`
-        bin/asadmin create-jvm-options "-Ddoi.baseurlstring=${doi_baseurl_esc}"
-fi
-if [ ! -z "${doi_dataciterestapiurl}" ]; then
-        bin/asadmin delete-jvm-options "-Ddoi.dataciterestapiurlstring=https\://api.test.datacite.org"
-        doi_dataciterestapiurl_esc=`echo ${doi_dataciterestapiurl} | sed -e 's/:/\\\:/'`
-        bin/asadmin create-jvm-options "-Ddoi.dataciterestapiurlstring=${doi_dataciterestapiurl_esc}"
-fi
diff --git a/conf/docker-aio/disableipv6.conf b/conf/docker-aio/disableipv6.conf
deleted file mode 100644
index 8d425183e3f..00000000000
--- a/conf/docker-aio/disableipv6.conf
+++ /dev/null
@@ -1 +0,0 @@
-net.ipv6.conf.all.disable_ipv6 = 1
diff --git a/conf/docker-aio/domain-restmonitor.xml b/conf/docker-aio/domain-restmonitor.xml
deleted file mode 100644
index a18a88ab011..00000000000
--- a/conf/docker-aio/domain-restmonitor.xml
+++ /dev/null
@@ -1,486 +0,0 @@
-<!--
-
-    DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS HEADER.
-
-    Copyright (c) 2012-2014 Oracle and/or its affiliates. All rights reserved.
-
-    The contents of this file are subject to the terms of either the GNU
-    General Public License Version 2 only ("GPL") or the Common Development
-    and Distribution License("CDDL") (collectively, the "License").  You
-    may not use this file except in compliance with the License.  You can
-    obtain a copy of the License at
-    https://glassfish.dev.java.net/public/CDDL+GPL_1_1.html
-    or packager/legal/LICENSE.txt.  See the License for the specific
-    language governing permissions and limitations under the License.
-
-    When distributing the software, include this License Header Notice in each
-    file and include the License file at packager/legal/LICENSE.txt.
-
-    GPL Classpath Exception:
-    Oracle designates this particular file as subject to the "Classpath"
-    exception as provided by Oracle in the GPL Version 2 section of the License
-    file that accompanied this code.
-
-    Modifications:
-    If applicable, add the following below the License Header, with the fields
-    enclosed by brackets [] replaced by your own identifying information:
-    "Portions Copyright [year] [name of copyright owner]"
-
-    Contributor(s):
-    If you wish your version of this file to be governed by only the CDDL or
-    only the GPL Version 2, indicate your decision by adding "[Contributor]
-    elects to include this software in this distribution under the [CDDL or GPL
-    Version 2] license."  If you don't indicate a single choice of license, a
-    recipient has the option to distribute your version of this file under
-    either the CDDL, the GPL Version 2 or to extend the choice of license to
-    its licensees as provided above.  However, if you add GPL Version 2 code
-    and therefore, elected the GPL Version 2 license, then the option applies
-    only if the new code is made subject to such option by the copyright
-    holder.
-
--->
-
-<domain log-root="${com.sun.aas.instanceRoot}/logs" application-root="${com.sun.aas.instanceRoot}/applications" version="10.0">
-<security-configurations>
-    <authentication-service default="true" name="adminAuth" use-password-credential="true">
-      <security-provider name="spcrealm" type="LoginModule" provider-name="adminSpc">
-        <login-module-config name="adminSpecialLM" control-flag="sufficient" module-class="com.sun.enterprise.admin.util.AdminLoginModule">
-          <property name="config" value="server-config"></property>
-          <property name="auth-realm" value="admin-realm"></property>
-        </login-module-config>
-      </security-provider>
-      <security-provider name="filerealm" type="LoginModule" provider-name="adminFile">
-        <login-module-config name="adminFileLM" control-flag="sufficient" module-class="com.sun.enterprise.security.auth.login.FileLoginModule">
-          <property name="config" value="server-config"></property>
-          <property name="auth-realm" value="admin-realm"></property>
-        </login-module-config>
-      </security-provider>
-    </authentication-service>
-    <authorization-service default="true" name="authorizationService">
-      <security-provider name="simpleAuthorization" type="Simple" provider-name="simpleAuthorizationProvider">
-        <authorization-provider-config support-policy-deploy="false" name="simpleAuthorizationProviderConfig"></authorization-provider-config>
-      </security-provider>
-    </authorization-service>
-  </security-configurations>
-  <system-applications />
-  <resources>
-    <jdbc-resource pool-name="__TimerPool" jndi-name="jdbc/__TimerPool" object-type="system-admin" />
-    <jdbc-resource pool-name="DerbyPool" jndi-name="jdbc/__default" object-type="system-all" />
-    <jdbc-connection-pool name="__TimerPool" datasource-classname="org.apache.derby.jdbc.EmbeddedXADataSource" res-type="javax.sql.XADataSource">
-      <property value="${com.sun.aas.instanceRoot}/lib/databases/ejbtimer" name="databaseName" />
-      <property value=";create=true" name="connectionAttributes" />
-    </jdbc-connection-pool>
-    <jdbc-connection-pool is-isolation-level-guaranteed="false" name="DerbyPool" datasource-classname="org.apache.derby.jdbc.ClientDataSource" res-type="javax.sql.DataSource">
-      <property value="1527" name="PortNumber" />
-      <property value="APP" name="Password" />
-      <property value="APP" name="User" />
-      <property value="localhost" name="serverName" />
-      <property value="sun-appserv-samples" name="DatabaseName" />
-      <property value=";create=true" name="connectionAttributes" />
-    </jdbc-connection-pool>
-  </resources>
-  <servers>
-    <server name="server" config-ref="server-config">
-      <resource-ref ref="jdbc/__TimerPool" />
-      <resource-ref ref="jdbc/__default" />
-    </server>
-  </servers>
-  <nodes>
-    <node name="localhost-domain1" type="CONFIG" node-host="localhost" install-dir="${com.sun.aas.productRoot}"/>
-  </nodes>
- <configs>
-   <config name="server-config">
-      <system-property name="JMS_PROVIDER_PORT" value="7676" description="Port Number that JMS Service will listen for remote clients connection." />
-      
-      <http-service>
-        <access-log/>
-        <virtual-server id="server" network-listeners="http-listener-1,http-listener-2"/>
-        <virtual-server id="__asadmin" network-listeners="admin-listener"/>
-      </http-service>
-      <iiop-service>
-        <orb use-thread-pool-ids="thread-pool-1" />
-        <iiop-listener address="0.0.0.0" port="3700" id="orb-listener-1" lazy-init="true"/>
-        <iiop-listener security-enabled="true" address="0.0.0.0" port="3820" id="SSL">
-          <ssl classname="com.sun.enterprise.security.ssl.GlassfishSSLImpl" cert-nickname="s1as" />
-        </iiop-listener>
-        <iiop-listener security-enabled="true" address="0.0.0.0" port="3920" id="SSL_MUTUALAUTH">
-          <ssl classname="com.sun.enterprise.security.ssl.GlassfishSSLImpl" cert-nickname="s1as" client-auth-enabled="true" />
-        </iiop-listener>
-      </iiop-service>
-      <admin-service auth-realm-name="admin-realm" type="das-and-server" system-jmx-connector-name="system">
-        <jmx-connector auth-realm-name="admin-realm" security-enabled="false" address="0.0.0.0" port="8686" name="system" />
-        <property value="/admin" name="adminConsoleContextRoot" />
-        <property value="${com.sun.aas.installRoot}/lib/install/applications/admingui.war" name="adminConsoleDownloadLocation" />
-        <property value="${com.sun.aas.installRoot}/.." name="ipsRoot" />
-      </admin-service>
-      <connector-service shutdown-timeout-in-seconds="30">
-      </connector-service>
-       <transaction-service tx-log-dir="${com.sun.aas.instanceRoot}/logs" />
-       <diagnostic-service />
-      <security-service>
-        <auth-realm classname="com.sun.enterprise.security.auth.realm.file.FileRealm" name="admin-realm">
-          <property value="${com.sun.aas.instanceRoot}/config/admin-keyfile" name="file" />
-          <property value="fileRealm" name="jaas-context" />
-        </auth-realm>
-        <auth-realm classname="com.sun.enterprise.security.auth.realm.file.FileRealm" name="file">
-          <property value="${com.sun.aas.instanceRoot}/config/keyfile" name="file" />
-          <property value="fileRealm" name="jaas-context" />
-        </auth-realm>
-        <auth-realm classname="com.sun.enterprise.security.auth.realm.certificate.CertificateRealm" name="certificate" />
-        <jacc-provider policy-configuration-factory-provider="com.sun.enterprise.security.provider.PolicyConfigurationFactoryImpl" policy-provider="com.sun.enterprise.security.provider.PolicyWrapper" name="default">
-          <property value="${com.sun.aas.instanceRoot}/generated/policy" name="repository" />
-        </jacc-provider>
-        <jacc-provider policy-configuration-factory-provider="com.sun.enterprise.security.jacc.provider.SimplePolicyConfigurationFactory" policy-provider="com.sun.enterprise.security.jacc.provider.SimplePolicyProvider" name="simple" />
-        <audit-module classname="com.sun.enterprise.security.ee.Audit" name="default">
-          <property value="false" name="auditOn" />
-        </audit-module>
-        <message-security-config auth-layer="SOAP">
-          <provider-config provider-id="XWS_ClientProvider" class-name="com.sun.xml.wss.provider.ClientSecurityAuthModule" provider-type="client">
-            <request-policy auth-source="content" />
-            <response-policy auth-source="content" />
-            <property value="s1as" name="encryption.key.alias" />
-            <property value="s1as" name="signature.key.alias" />
-            <property value="false" name="dynamic.username.password" />
-            <property value="false" name="debug" />
-          </provider-config>
-          <provider-config provider-id="ClientProvider" class-name="com.sun.xml.wss.provider.ClientSecurityAuthModule" provider-type="client">
-            <request-policy auth-source="content" />
-            <response-policy auth-source="content" />
-            <property value="s1as" name="encryption.key.alias" />
-            <property value="s1as" name="signature.key.alias" />
-            <property value="false" name="dynamic.username.password" />
-            <property value="false" name="debug" />
-            <property value="${com.sun.aas.instanceRoot}/config/wss-server-config-1.0.xml" name="security.config" />
-          </provider-config>
-          <provider-config provider-id="XWS_ServerProvider" class-name="com.sun.xml.wss.provider.ServerSecurityAuthModule" provider-type="server">
-            <request-policy auth-source="content" />
-            <response-policy auth-source="content" />
-            <property value="s1as" name="encryption.key.alias" />
-            <property value="s1as" name="signature.key.alias" />
-            <property value="false" name="debug" />
-          </provider-config>
-          <provider-config provider-id="ServerProvider" class-name="com.sun.xml.wss.provider.ServerSecurityAuthModule" provider-type="server">
-            <request-policy auth-source="content" />
-            <response-policy auth-source="content" />
-            <property value="s1as" name="encryption.key.alias" />
-            <property value="s1as" name="signature.key.alias" />
-            <property value="false" name="debug" />
-            <property value="${com.sun.aas.instanceRoot}/config/wss-server-config-1.0.xml" name="security.config" />
-          </provider-config>
-        </message-security-config>
-        <message-security-config auth-layer="HttpServlet">
-            <provider-config provider-type="server" provider-id="GFConsoleAuthModule" class-name="org.glassfish.admingui.common.security.AdminConsoleAuthModule">
-                <request-policy auth-source="sender"></request-policy>
-                <response-policy></response-policy>
-                <property name="loginPage" value="/login.jsf"></property>
-                <property name="loginErrorPage" value="/loginError.jsf"></property>
-            </provider-config>
-        </message-security-config>
-	<property value="SHA-256" name="default-digest-algorithm" />
-      </security-service>
-      <java-config classpath-suffix="" system-classpath="" debug-options="-agentlib:jdwp=transport=dt_socket,server=y,suspend=n,address=9009">
-        <jvm-options>-XX:MaxPermSize=192m</jvm-options>
-        <jvm-options>-client</jvm-options>
-        <jvm-options>-Djava.awt.headless=true</jvm-options>
-        <jvm-options>-Djdk.corba.allowOutputStreamSubclass=true</jvm-options>
-        <jvm-options>-Djavax.xml.accessExternalSchema=all</jvm-options>
-        <jvm-options>-Djavax.management.builder.initial=com.sun.enterprise.v3.admin.AppServerMBeanServerBuilder</jvm-options>
-        <jvm-options>-XX:+UnlockDiagnosticVMOptions</jvm-options>
-        <jvm-options>-Djava.endorsed.dirs=${com.sun.aas.installRoot}/modules/endorsed${path.separator}${com.sun.aas.installRoot}/lib/endorsed</jvm-options>
-        <jvm-options>-Djava.security.policy=${com.sun.aas.instanceRoot}/config/server.policy</jvm-options>
-        <jvm-options>-Djava.security.auth.login.config=${com.sun.aas.instanceRoot}/config/login.conf</jvm-options>
-        <jvm-options>-Dcom.sun.enterprise.security.httpsOutboundKeyAlias=s1as</jvm-options>
-        <jvm-options>-Xmx512m</jvm-options>
-        <jvm-options>-Djavax.net.ssl.keyStore=${com.sun.aas.instanceRoot}/config/keystore.jks</jvm-options>
-        <jvm-options>-Djavax.net.ssl.trustStore=${com.sun.aas.instanceRoot}/config/cacerts.jks</jvm-options>
-        <jvm-options>-Djava.ext.dirs=${com.sun.aas.javaRoot}/lib/ext${path.separator}${com.sun.aas.javaRoot}/jre/lib/ext${path.separator}${com.sun.aas.instanceRoot}/lib/ext</jvm-options>
-        <jvm-options>-Djdbc.drivers=org.apache.derby.jdbc.ClientDriver</jvm-options>
-		<jvm-options>-DANTLR_USE_DIRECT_CLASS_LOADING=true</jvm-options>
-        <jvm-options>-Dcom.sun.enterprise.config.config_environment_factory_class=com.sun.enterprise.config.serverbeans.AppserverConfigEnvironmentFactory</jvm-options>
-        <!-- Configure post startup bundle list here. This is a comma separated list of bundle sybolic names. -->
-        <jvm-options>-Dorg.glassfish.additionalOSGiBundlesToStart=org.apache.felix.shell,org.apache.felix.gogo.runtime,org.apache.felix.gogo.shell,org.apache.felix.gogo.command,org.apache.felix.shell.remote,org.apache.felix.fileinstall</jvm-options>
-        <!-- Configuration of various third-party OSGi bundles like
-             Felix Remote Shell, FileInstall, etc. -->
-        <!-- Port on which remote shell listens for connections.-->
-        <jvm-options>-Dosgi.shell.telnet.port=6666</jvm-options>
-        <!-- How many concurrent users can connect to this remote shell -->
-        <jvm-options>-Dosgi.shell.telnet.maxconn=1</jvm-options>
-        <!-- From which hosts users can connect -->
-        <jvm-options>-Dosgi.shell.telnet.ip=127.0.0.1</jvm-options>
-        <!-- Gogo shell configuration -->
-        <jvm-options>-Dgosh.args=--nointeractive</jvm-options>
-        <!-- Directory being watched by fileinstall. -->
-        <jvm-options>-Dfelix.fileinstall.dir=${com.sun.aas.installRoot}/modules/autostart/</jvm-options>
-        <!-- Time period fileinstaller thread in ms. -->
-        <jvm-options>-Dfelix.fileinstall.poll=5000</jvm-options>
-        <!-- log level: 1 for error, 2 for warning, 3 for info and 4 for debug. -->
-        <jvm-options>-Dfelix.fileinstall.log.level=2</jvm-options>
-        <!-- should new bundles be started or installed only? 
-             true => start, false => only install 
-        -->
-        <jvm-options>-Dfelix.fileinstall.bundles.new.start=true</jvm-options>
-        <!-- should watched bundles be started transiently or persistently -->
-        <jvm-options>-Dfelix.fileinstall.bundles.startTransient=true</jvm-options>
-        <!-- Should changes to configuration be saved in corresponding cfg file? false: no, true: yes
-             If we don't set false, everytime server starts from clean osgi cache, the file gets rewritten.
-        -->
-        <jvm-options>-Dfelix.fileinstall.disableConfigSave=false</jvm-options>
-        <!-- End of OSGi bundle configurations -->
-        <jvm-options>-XX:NewRatio=2</jvm-options>
-        <!-- Woodstox property needed to pass StAX TCK -->
-        <jvm-options>-Dcom.ctc.wstx.returnNullForDefaultNamespace=true</jvm-options>
-      </java-config>
-      <network-config>
-        <protocols>
-          <protocol name="http-listener-1">
-            <http default-virtual-server="server" max-connections="250">
-              <file-cache enabled="false"></file-cache>
-            </http>
-          </protocol>
-          <protocol security-enabled="true" name="http-listener-2">
-            <http default-virtual-server="server" max-connections="250">
-              <file-cache enabled="false"></file-cache>
-            </http>
-            <ssl classname="com.sun.enterprise.security.ssl.GlassfishSSLImpl" ssl3-enabled="false" cert-nickname="s1as"></ssl>
-          </protocol>
-          <protocol name="admin-listener">
-            <http default-virtual-server="__asadmin" max-connections="250" encoded-slash-enabled="true" >
-              <file-cache enabled="false"></file-cache>
-            </http>
-          </protocol>
-        </protocols>
-        <network-listeners>
-          <network-listener port="8080" protocol="http-listener-1" transport="tcp" name="http-listener-1" thread-pool="http-thread-pool"></network-listener>
-          <network-listener port="8181" protocol="http-listener-2" transport="tcp" name="http-listener-2" thread-pool="http-thread-pool"></network-listener>
-          <network-listener port="4848" protocol="admin-listener" transport="tcp" name="admin-listener" thread-pool="admin-thread-pool"></network-listener>
-        </network-listeners>
-        <transports>
-          <transport name="tcp"></transport>
-        </transports>
-      </network-config>
-      <thread-pools>
-          <thread-pool name="admin-thread-pool" max-thread-pool-size="50" max-queue-size="256"></thread-pool>
-          <thread-pool name="http-thread-pool" max-queue-size="4096"></thread-pool>
-          <thread-pool name="thread-pool-1" max-thread-pool-size="200"/>
-  </thread-pools>
-  <!-- try to enable REST monitoring by editing config -->
-      <monitoring-service>
-        <module-monitoring-levels jvm="LOW"></module-monitoring-levels>
-      </monitoring-service>
-    </config>
-     <config name="default-config" dynamic-reconfiguration-enabled="true" >
-         <http-service>
-             <access-log/>
-             <virtual-server id="server" network-listeners="http-listener-1, http-listener-2" >
-                 <property name="default-web-xml" value="${com.sun.aas.instanceRoot}/config/default-web.xml"/>
-             </virtual-server>
-             <virtual-server id="__asadmin" network-listeners="admin-listener" />
-         </http-service>
-         <iiop-service>
-             <orb use-thread-pool-ids="thread-pool-1" />
-             <iiop-listener port="${IIOP_LISTENER_PORT}" id="orb-listener-1" address="0.0.0.0" />
-             <iiop-listener port="${IIOP_SSL_LISTENER_PORT}" id="SSL" address="0.0.0.0" security-enabled="true">
-                 <ssl classname="com.sun.enterprise.security.ssl.GlassfishSSLImpl" cert-nickname="s1as" />
-             </iiop-listener>
-             <iiop-listener port="${IIOP_SSL_MUTUALAUTH_PORT}" id="SSL_MUTUALAUTH" address="0.0.0.0" security-enabled="true">
-                 <ssl classname="com.sun.enterprise.security.ssl.GlassfishSSLImpl" cert-nickname="s1as" client-auth-enabled="true" />
-             </iiop-listener>
-         </iiop-service>
-         <admin-service system-jmx-connector-name="system" type="server">
-             <!-- JSR 160  "system-jmx-connector" -->
-             <jmx-connector address="0.0.0.0" auth-realm-name="admin-realm" name="system" port="${JMX_SYSTEM_CONNECTOR_PORT}" protocol="rmi_jrmp" security-enabled="false"/>
-             <!-- JSR 160  "system-jmx-connector" -->
-             <property value="${com.sun.aas.installRoot}/lib/install/applications/admingui.war" name="adminConsoleDownloadLocation" />
-         </admin-service>
-         <web-container>
-             <session-config>
-                 <session-manager>
-                     <manager-properties/>
-                     <store-properties />
-                 </session-manager>
-                 <session-properties />
-             </session-config>
-         </web-container>
-         <ejb-container session-store="${com.sun.aas.instanceRoot}/session-store">
-             <ejb-timer-service />
-         </ejb-container>
-         <mdb-container />
-         <jms-service type="EMBEDDED" default-jms-host="default_JMS_host" addresslist-behavior="priority">
-             <jms-host name="default_JMS_host" host="localhost" port="${JMS_PROVIDER_PORT}" admin-user-name="admin" admin-password="admin" lazy-init="true"/>
-         </jms-service>
-         <log-service log-rotation-limit-in-bytes="2000000" file="${com.sun.aas.instanceRoot}/logs/server.log">
-             <module-log-levels />
-         </log-service>
-         <security-service>
-             <auth-realm classname="com.sun.enterprise.security.auth.realm.file.FileRealm" name="admin-realm">
-                 <property name="file" value="${com.sun.aas.instanceRoot}/config/admin-keyfile" />
-                 <property name="jaas-context" value="fileRealm" />
-             </auth-realm>
-             <auth-realm classname="com.sun.enterprise.security.auth.realm.file.FileRealm" name="file">
-                 <property name="file" value="${com.sun.aas.instanceRoot}/config/keyfile" />
-                 <property name="jaas-context" value="fileRealm" />
-             </auth-realm>
-             <auth-realm classname="com.sun.enterprise.security.auth.realm.certificate.CertificateRealm" name="certificate" />
-             <jacc-provider policy-provider="com.sun.enterprise.security.provider.PolicyWrapper" name="default" policy-configuration-factory-provider="com.sun.enterprise.security.provider.PolicyConfigurationFactoryImpl">
-                 <property name="repository" value="${com.sun.aas.instanceRoot}/generated/policy" />
-             </jacc-provider>
-             <jacc-provider policy-provider="com.sun.enterprise.security.jacc.provider.SimplePolicyProvider" name="simple" policy-configuration-factory-provider="com.sun.enterprise.security.jacc.provider.SimplePolicyConfigurationFactory" />
-             <audit-module classname="com.sun.enterprise.security.ee.Audit" name="default">
-                 <property value="false" name="auditOn" />
-             </audit-module>
-             <message-security-config auth-layer="SOAP">
-                 <provider-config provider-type="client" provider-id="XWS_ClientProvider" class-name="com.sun.xml.wss.provider.ClientSecurityAuthModule">
-                     <request-policy auth-source="content" />
-                     <response-policy auth-source="content" />
-                     <property name="encryption.key.alias" value="s1as" />
-                     <property name="signature.key.alias" value="s1as" />
-                     <property name="dynamic.username.password" value="false" />
-                     <property name="debug" value="false" />
-                 </provider-config>
-                 <provider-config provider-type="client" provider-id="ClientProvider" class-name="com.sun.xml.wss.provider.ClientSecurityAuthModule">
-                     <request-policy auth-source="content" />
-                     <response-policy auth-source="content" />
-                     <property name="encryption.key.alias" value="s1as" />
-                     <property name="signature.key.alias" value="s1as" />
-                     <property name="dynamic.username.password" value="false" />
-                     <property name="debug" value="false" />
-                     <property name="security.config" value="${com.sun.aas.instanceRoot}/config/wss-server-config-1.0.xml" />
-                 </provider-config>
-                 <provider-config provider-type="server" provider-id="XWS_ServerProvider" class-name="com.sun.xml.wss.provider.ServerSecurityAuthModule">
-                     <request-policy auth-source="content" />
-                     <response-policy auth-source="content" />
-                     <property name="encryption.key.alias" value="s1as" />
-                     <property name="signature.key.alias" value="s1as" />
-                     <property name="debug" value="false" />
-                 </provider-config>
-                 <provider-config provider-type="server" provider-id="ServerProvider" class-name="com.sun.xml.wss.provider.ServerSecurityAuthModule">
-                     <request-policy auth-source="content" />
-                     <response-policy auth-source="content" />
-                     <property name="encryption.key.alias" value="s1as" />
-                     <property name="signature.key.alias" value="s1as" />
-                     <property name="debug" value="false" />
-                     <property name="security.config" value="${com.sun.aas.instanceRoot}/config/wss-server-config-1.0.xml" />
-                 </provider-config>
-             </message-security-config>
-         </security-service>
-         <transaction-service tx-log-dir="${com.sun.aas.instanceRoot}/logs" automatic-recovery="true" />
-         <diagnostic-service />
-         <java-config debug-options="-agentlib:jdwp=transport=dt_socket,server=y,suspend=n,address=${JAVA_DEBUGGER_PORT}" system-classpath="" classpath-suffix="">
-             <jvm-options>-XX:MaxPermSize=192m</jvm-options>
-             <jvm-options>-server</jvm-options>
-             <jvm-options>-Djava.awt.headless=true</jvm-options>
-             <jvm-options>-Djdk.corba.allowOutputStreamSubclass=true</jvm-options>
-             <jvm-options>-XX:+UnlockDiagnosticVMOptions</jvm-options>
-             <jvm-options>-Djava.endorsed.dirs=${com.sun.aas.installRoot}/modules/endorsed${path.separator}${com.sun.aas.installRoot}/lib/endorsed</jvm-options>
-             <jvm-options>-Djava.security.policy=${com.sun.aas.instanceRoot}/config/server.policy</jvm-options>
-             <jvm-options>-Djava.security.auth.login.config=${com.sun.aas.instanceRoot}/config/login.conf</jvm-options>
-             <jvm-options>-Dcom.sun.enterprise.security.httpsOutboundKeyAlias=s1as</jvm-options>
-             <jvm-options>-Djavax.net.ssl.keyStore=${com.sun.aas.instanceRoot}/config/keystore.jks</jvm-options>
-             <jvm-options>-Djavax.net.ssl.trustStore=${com.sun.aas.instanceRoot}/config/cacerts.jks</jvm-options>
-             <jvm-options>-Djava.ext.dirs=${com.sun.aas.javaRoot}/lib/ext${path.separator}${com.sun.aas.javaRoot}/jre/lib/ext${path.separator}${com.sun.aas.instanceRoot}/lib/ext</jvm-options>
-             <jvm-options>-Djdbc.drivers=org.apache.derby.jdbc.ClientDriver</jvm-options>
-             <jvm-options>-DANTLR_USE_DIRECT_CLASS_LOADING=true</jvm-options>
-             <jvm-options>-Dcom.sun.enterprise.config.config_environment_factory_class=com.sun.enterprise.config.serverbeans.AppserverConfigEnvironmentFactory</jvm-options>
-             <jvm-options>-XX:NewRatio=2</jvm-options>
-             <jvm-options>-Xmx512m</jvm-options>
-             <!-- Configure post startup bundle list here. This is a comma separated list of bundle sybolic names.
-                  The remote shell bundle has been disabled for cluster and remote instances. -->
-             <jvm-options>-Dorg.glassfish.additionalOSGiBundlesToStart=org.apache.felix.shell,org.apache.felix.gogo.runtime,org.apache.felix.gogo.shell,org.apache.felix.gogo.command,org.apache.felix.fileinstall</jvm-options>
-             <!-- Port on which remote shell listens for connections.-->
-             <jvm-options>-Dosgi.shell.telnet.port=${OSGI_SHELL_TELNET_PORT}</jvm-options>
-             <!-- How many concurrent users can connect to this remote shell -->
-             <jvm-options>-Dosgi.shell.telnet.maxconn=1</jvm-options>
-             <!-- From which hosts users can connect -->
-             <jvm-options>-Dosgi.shell.telnet.ip=127.0.0.1</jvm-options>
-             <!-- Gogo shell configuration -->
-             <jvm-options>-Dgosh.args=--noshutdown -c noop=true</jvm-options>
-             <!-- Directory being watched by fileinstall. -->
-             <jvm-options>-Dfelix.fileinstall.dir=${com.sun.aas.installRoot}/modules/autostart/</jvm-options>
-             <!-- Time period fileinstaller thread in ms. -->
-             <jvm-options>-Dfelix.fileinstall.poll=5000</jvm-options>
-             <!-- log level: 1 for error, 2 for warning, 3 for info and 4 for debug. -->
-             <jvm-options>-Dfelix.fileinstall.log.level=3</jvm-options>
-             <!-- should new bundles be started or installed only?
-                 true => start, false => only install
-             -->
-             <jvm-options>-Dfelix.fileinstall.bundles.new.start=true</jvm-options>
-             <!-- should watched bundles be started transiently or persistently -->
-             <jvm-options>-Dfelix.fileinstall.bundles.startTransient=true</jvm-options>
-             <!-- Should changes to configuration be saved in corresponding cfg file? false: no, true: yes
-                  If we don't set false, everytime server starts from clean osgi cache, the file gets rewritten.
-             -->
-             <jvm-options>-Dfelix.fileinstall.disableConfigSave=false</jvm-options>
-             <!-- End of OSGi bundle configurations -->
-        </java-config>
-         <availability-service>
-             <web-container-availability/>
-             <ejb-container-availability sfsb-store-pool-name="jdbc/hastore"/>
-             <jms-availability/>
-         </availability-service>
-         <network-config>
-             <protocols>
-                 <protocol name="http-listener-1">
-                     <http default-virtual-server="server">
-                         <file-cache />
-                     </http>
-                 </protocol>
-                 <protocol security-enabled="true" name="http-listener-2">
-                     <http default-virtual-server="server">
-                         <file-cache />
-                     </http>
-                     <ssl classname="com.sun.enterprise.security.ssl.GlassfishSSLImpl" ssl3-enabled="false" cert-nickname="s1as" />
-                 </protocol>
-                 <protocol name="admin-listener">
-                     <http default-virtual-server="__asadmin" max-connections="250">
-                         <file-cache enabled="false" />
-                     </http>
-                 </protocol>
-                 <protocol security-enabled="true" name="sec-admin-listener">
-                   <http default-virtual-server="__asadmin" encoded-slash-enabled="true">
-                     <file-cache></file-cache>
-                   </http>
-                   <ssl client-auth="want" ssl3-enabled="false" classname="com.sun.enterprise.security.ssl.GlassfishSSLImpl" cert-nickname="glassfish-instance" renegotiate-on-client-auth-want="false"></ssl>
-                 </protocol>
-                 <protocol name="admin-http-redirect">
-                   <http-redirect secure="true"></http-redirect>
-                 </protocol>
-                 <protocol name="pu-protocol">
-                   <port-unification>
-                     <protocol-finder protocol="sec-admin-listener" name="http-finder" classname="org.glassfish.grizzly.config.portunif.HttpProtocolFinder"></protocol-finder>
-                     <protocol-finder protocol="admin-http-redirect" name="admin-http-redirect" classname="org.glassfish.grizzly.config.portunif.HttpProtocolFinder"></protocol-finder>
-                   </port-unification>
-                 </protocol>
-
-             </protocols>
-             <network-listeners>
-                 <network-listener address="0.0.0.0" port="${HTTP_LISTENER_PORT}" protocol="http-listener-1" transport="tcp" name="http-listener-1" thread-pool="http-thread-pool" />
-                 <network-listener address="0.0.0.0" port="${HTTP_SSL_LISTENER_PORT}" protocol="http-listener-2" transport="tcp" name="http-listener-2" thread-pool="http-thread-pool" />
-                 <network-listener port="${ASADMIN_LISTENER_PORT}" protocol="pu-protocol" transport="tcp" name="admin-listener" thread-pool="http-thread-pool" />
-             </network-listeners>
-             <transports>
-                 <transport name="tcp" />
-             </transports>
-         </network-config>
-         <thread-pools>
-             <thread-pool name="http-thread-pool" />
-             <thread-pool max-thread-pool-size="200" idle-thread-timeout-in-seconds="120" name="thread-pool-1" />
-         </thread-pools>
-         <group-management-service/>
-         <system-property name="JMS_PROVIDER_PORT" value="27676" description="Port Number that JMS Service will listen for remote clients connection." />
-         <system-property name="ASADMIN_LISTENER_PORT" value="24848"/>
-         <system-property name="HTTP_LISTENER_PORT" value="28080"/>
-         <system-property name="HTTP_SSL_LISTENER_PORT" value="28181"/>
-         <system-property name="IIOP_LISTENER_PORT" value="23700"/>
-         <system-property name="IIOP_SSL_LISTENER_PORT" value="23820"/>
-         <system-property name="IIOP_SSL_MUTUALAUTH_PORT" value="23920"/>
-         <system-property name="JMX_SYSTEM_CONNECTOR_PORT" value="28686"/>
-         <system-property name="OSGI_SHELL_TELNET_PORT" value="26666"/>
-         <system-property name="JAVA_DEBUGGER_PORT" value="29009"/>
-     </config>
-  </configs>
-  <property name="administrative.domain.name" value="domain1"/>
-  <secure-admin special-admin-indicator="718fe3ff-df18-49f8-84a0-3aeedb3250db">
-      <secure-admin-principal dn="CN=localhost,OU=GlassFish,O=Oracle Corporation,L=Santa Clara,ST=California,C=US"></secure-admin-principal>
-      <secure-admin-principal dn="CN=localhost-instance,OU=GlassFish,O=Oracle Corporation,L=Santa Clara,ST=California,C=US"></secure-admin-principal>
-  </secure-admin>
-</domain>
diff --git a/conf/docker-aio/dv/install/default.config b/conf/docker-aio/dv/install/default.config
deleted file mode 100644
index 0b806a8714b..00000000000
--- a/conf/docker-aio/dv/install/default.config
+++ /dev/null
@@ -1,15 +0,0 @@
-HOST_DNS_ADDRESS	localhost
-GLASSFISH_DIRECTORY	/opt/glassfish4
-ADMIN_EMAIL	 
-MAIL_SERVER	mail.hmdc.harvard.edu
-POSTGRES_ADMIN_PASSWORD	secret
-POSTGRES_SERVER	db
-POSTGRES_PORT	5432
-POSTGRES_DATABASE	dvndb
-POSTGRES_USER	dvnapp
-POSTGRES_PASSWORD	secret
-SOLR_LOCATION	idx
-RSERVE_HOST	localhost
-RSERVE_PORT	6311
-RSERVE_USER	rserve
-RSERVE_PASSWORD	rserve
diff --git a/conf/docker-aio/dv/pg_hba.conf b/conf/docker-aio/dv/pg_hba.conf
deleted file mode 100644
index 77feba5247d..00000000000
--- a/conf/docker-aio/dv/pg_hba.conf
+++ /dev/null
@@ -1,91 +0,0 @@
-# PostgreSQL Client Authentication Configuration File
-# ===================================================
-#
-# Refer to the "Client Authentication" section in the PostgreSQL
-# documentation for a complete description of this file.  A short
-# synopsis follows.
-#
-# This file controls: which hosts are allowed to connect, how clients
-# are authenticated, which PostgreSQL user names they can use, which
-# databases they can access.  Records take one of these forms:
-#
-# local      DATABASE  USER  METHOD  [OPTIONS]
-# host       DATABASE  USER  ADDRESS  METHOD  [OPTIONS]
-# hostssl    DATABASE  USER  ADDRESS  METHOD  [OPTIONS]
-# hostnossl  DATABASE  USER  ADDRESS  METHOD  [OPTIONS]
-#
-# (The uppercase items must be replaced by actual values.)
-#
-# The first field is the connection type: "local" is a Unix-domain
-# socket, "host" is either a plain or SSL-encrypted TCP/IP socket,
-# "hostssl" is an SSL-encrypted TCP/IP socket, and "hostnossl" is a
-# plain TCP/IP socket.
-#
-# DATABASE can be "all", "sameuser", "samerole", "replication", a
-# database name, or a comma-separated list thereof. The "all"
-# keyword does not match "replication". Access to replication
-# must be enabled in a separate record (see example below).
-#
-# USER can be "all", a user name, a group name prefixed with "+", or a
-# comma-separated list thereof.  In both the DATABASE and USER fields
-# you can also write a file name prefixed with "@" to include names
-# from a separate file.
-#
-# ADDRESS specifies the set of hosts the record matches.  It can be a
-# host name, or it is made up of an IP address and a CIDR mask that is
-# an integer (between 0 and 32 (IPv4) or 128 (IPv6) inclusive) that
-# specifies the number of significant bits in the mask.  A host name
-# that starts with a dot (.) matches a suffix of the actual host name.
-# Alternatively, you can write an IP address and netmask in separate
-# columns to specify the set of hosts.  Instead of a CIDR-address, you
-# can write "samehost" to match any of the server's own IP addresses,
-# or "samenet" to match any address in any subnet that the server is
-# directly connected to.
-#
-# METHOD can be "trust", "reject", "md5", "password", "gss", "sspi",
-# "krb5", "ident", "peer", "pam", "ldap", "radius" or "cert".  Note that
-# "password" sends passwords in clear text; "md5" is preferred since
-# it sends encrypted passwords.
-#
-# OPTIONS are a set of options for the authentication in the format
-# NAME=VALUE.  The available options depend on the different
-# authentication methods -- refer to the "Client Authentication"
-# section in the documentation for a list of which options are
-# available for which authentication methods.
-#
-# Database and user names containing spaces, commas, quotes and other
-# special characters must be quoted.  Quoting one of the keywords
-# "all", "sameuser", "samerole" or "replication" makes the name lose
-# its special character, and just match a database or username with
-# that name.
-#
-# This file is read on server startup and when the postmaster receives
-# a SIGHUP signal.  If you edit the file on a running system, you have
-# to SIGHUP the postmaster for the changes to take effect.  You can
-# use "pg_ctl reload" to do that.
-
-# Put your actual configuration here
-# ----------------------------------
-#
-# If you want to allow non-local connections, you need to add more
-# "host" records.  In that case you will also need to make PostgreSQL
-# listen on a non-local interface via the listen_addresses
-# configuration parameter, or via the -i or -h command line switches.
-
-
-
-# TYPE  DATABASE        USER            ADDRESS                 METHOD
-
-# "local" is for Unix domain socket connections only
-#local   all             all                                     peer
-local   all             all                                     trust
-# IPv4 local connections:
-#host    all             all             127.0.0.1/32            trust
-host    all             all             0.0.0.0/0            trust
-# IPv6 local connections:
-host    all             all             ::1/128                 trust
-# Allow replication connections from localhost, by a user with the
-# replication privilege.
-#local   replication     postgres                                peer
-#host    replication     postgres        127.0.0.1/32            ident
-#host    replication     postgres        ::1/128                 ident
diff --git a/conf/docker-aio/entrypoint.bash b/conf/docker-aio/entrypoint.bash
deleted file mode 100755
index 236bb30f67a..00000000000
--- a/conf/docker-aio/entrypoint.bash
+++ /dev/null
@@ -1,16 +0,0 @@
-#!/usr/bin/env bash
-export LANG=en_US.UTF-8
-sudo -u postgres /usr/pgsql-13/bin/pg_ctl start -D /var/lib/pgsql/13/data &
-cd /opt/solr-8.11.1/
-# TODO: Run Solr as non-root and remove "-force".
-bin/solr start -force
-bin/solr create_core -c collection1 -d server/solr/collection1/conf -force
-
-# start apache, in both foreground and background...
-apachectl -DFOREGROUND &
-
-# TODO: Run Payara as non-root.
-cd /opt/payara5
-bin/asadmin start-domain --debug
-sleep infinity
-
diff --git a/conf/docker-aio/httpd.conf b/conf/docker-aio/httpd.conf
deleted file mode 100644
index 85c851d785f..00000000000
--- a/conf/docker-aio/httpd.conf
+++ /dev/null
@@ -1,27 +0,0 @@
-
-Include conf.d/*.conf
-Include conf.modules.d/*.conf
-ServerName localhost
-Listen 80 443
-PidFile run/httpd.pid
-DocumentRoot "/var/www/html"
-TypesConfig /etc/mime.types
-User apache
-Group apache
-
-<VirtualHost *:80>
-  ServerName localhost
-  LogLevel debug
- ErrorLog logs/error_log
- LogFormat "%h %l %u %t \"%r\" %>s %b \"%{Referer}i\" \"%{User-Agent}i\"" combined
- CustomLog logs/access_log combined
- 
-  # proxy config (aka - what to send to glassfish or not)
-  ProxyPassMatch ^/Shibboleth.sso !
-  ProxyPassMatch ^/shibboleth-ds !
-  # pass everything else to Glassfish
-  ProxyPass / ajp://localhost:8009/
-# glassfish can be slow sometimes
-  ProxyTimeout 300 
-
-</VirtualHost>
diff --git a/conf/docker-aio/install.bash b/conf/docker-aio/install.bash
deleted file mode 100755
index 2b3275ad830..00000000000
--- a/conf/docker-aio/install.bash
+++ /dev/null
@@ -1,10 +0,0 @@
-#!/usr/bin/env bash
-sudo -u postgres createuser --superuser dvnapp
-#./entrypoint.bash &
-unzip dvinstall.zip
-cd dvinstall/
-echo "beginning installer"
-./install -admin_email=dvAdmin@mailinator.com -y -f > install.out 2> install.err
-
-echo "installer complete"
-cat install.err
diff --git a/conf/docker-aio/prep_it.bash b/conf/docker-aio/prep_it.bash
deleted file mode 100755
index adb257e43b1..00000000000
--- a/conf/docker-aio/prep_it.bash
+++ /dev/null
@@ -1,55 +0,0 @@
-#!/usr/bin/env bash
-
-# run through all the steps to setup docker-aio to run integration tests
-
-# hard-codes several assumptions: image is named dv0, container is named dv, port is 8084
-
-# glassfish healthy/ready retries
-n_wait=5
-
-cd conf/docker-aio
-./0prep_deps.sh
-./1prep.sh
-docker build -t dv0 -f c8.dockerfile .
-# cleanup from previous runs if necessary
-docker rm -f dv
-# start container
-docker run -d -p 8084:80 -p 8083:8080 -p 9010:9009 --name dv dv0
-# wait for glassfish to be healthy
-i_wait=0
-d_wait=10
-while [ $i_wait -lt $n_wait ]
-do
-	h=`docker inspect -f "{{.State.Health.Status}}" dv`
-	if [ "healthy" == "${h}" ]; then
-		break
-	else
-		sleep $d_wait
-	fi
-	i_wait=$(( $i_wait + 1 ))
-	
-done
-# try setupIT.bash
-docker exec dv /opt/dv/setupIT.bash
-err=$?
-if [ $err -ne 0 ]; then
-	echo "error - setupIT failure"
-	exit 1
-fi
-# configure DOI provider based on docker build arguments / environmental variables
-docker exec dv /opt/dv/configure_doi.bash
-err=$?
-if [ $err -ne 0 ]; then
-	echo "error - DOI configuration failure"
-	exit 1
-fi
-# handle config for the private url test (and things like publishing...)
-./seturl.bash
-
-
-cd ../..
-#echo "docker-aio ready to run integration tests ($i_retry)"
-echo "docker-aio ready to run integration tests"
-curl http://localhost:8084/api/info/version
-echo $?
-
diff --git a/conf/docker-aio/readme.md b/conf/docker-aio/readme.md
deleted file mode 100644
index f3031a5bb6e..00000000000
--- a/conf/docker-aio/readme.md
+++ /dev/null
@@ -1,64 +0,0 @@
-# Docker All-In-One
-
-> :information_source:  **NOTE: Sunsetting of this module is imminent.** There is no schedule yet, but expect it to go away.
-> Please let the [Dataverse Containerization Working Group](https://ct.gdcc.io) know if you are a user and
-> what should be preserved.
-
-First pass docker all-in-one image, intended for running integration tests against.
-Also usable for normal development and system evaluation; not intended for production.
-
-### Requirements:
- - java11 compiler, maven, make, wget, docker
-
-### Quickstart:
- - in the root of the repository, run `./conf/docker-aio/prep_it.bash`
- - if using DataCite test credentials, update the build args appropriately.
- - if all goes well, you should see the results of the `api/info/version` endpoint, including the deployed build (eg `{"status":"OK","data":{"version":"4.8.6","build":"develop-c3e9f40"}}`). If not, you may need to read the non-quickstart instructions.
- - run integration tests: `./conf/docker-aio/run-test-suite.sh`
-
-----
-
-## More in-depth documentation:
-
-
-### Initial setup (aka - do once):
-- `cd conf/docker-aio` and run `./0prep_deps.sh` to created Payara and Solr tarballs in `conf/docker-aio/dv/deps`.
-
-### Per-build:
-
-> Note: If you encounter any issues, see the Troubleshooting section at the end of this document.
-
-#### Setup
-
-- `cd conf/docker-aio`, and run `./1prep.sh` to copy files for integration test data into docker build context; `1prep.sh` will also build the war file and installation zip file
-- build the docker image: `docker build -t dv0 -f c8.dockerfile .`
-
-- Run image: `docker run -d -p 8083:8080 -p 8084:80 --name dv dv0` (aka - forward port 8083 locally to 8080 in the container for payara, and 8084 to 80 for apache); if you'd like to connect a java debugger to payara, use `docker run -d -p 8083:8080 -p 8084:80 -p 9010:9009 --name dv dv0`
-
-- Installation (integration test): `docker exec dv /opt/dv/setupIT.bash` 
-  (Note that it's possible to customize the installation by editing `conf/docker-aio/default.config` and running `docker exec dv /opt/dv/install.bash` but for the purposes of integration testing, the `setupIT.bash` script above works fine.)
-
-- update `dataverse.siteUrl` (appears only necessary for `DatasetsIT.testPrivateUrl`): `docker exec dv /usr/local/glassfish4/bin/asadmin create-jvm-options "-Ddataverse.siteUrl=http\://localhost\:8084"` (or use the provided `seturl.bash`)
-
-#### Run integration tests: 
-
-First, cd back to the root of the repo where the `pom.xml` file is (`cd ../..` assuming you're still in the `conf/docker-aio` directory). Then run the test suite with script below:
-
-`conf/docker-aio/run-test-suite.sh`
-
-There isn't any strict requirement on the local port (8083, 8084 in this doc), the name of the image (dv0) or container (dv), these can be changed as desired as long as they are consistent.
-
-### Troubleshooting Notes:
-
-* If Dataverse' build fails due to an error about `Module` being ambiguous, you might be using a Java 9 compiler.
-
-* If you see an error like this: 
- ```
- docker: Error response from daemon: Conflict. The container name "/dv" is already in use by container "5f72a45b68c86c7b0f4305b83ce7d663020329ea4e30fa2a3ce9ddb05223533d"
- You have to remove (or rename) that container to be able to reuse that name.
- ``` 
-    run something like `docker ps -a | grep dv` to see the container left over from the last run and something like `docker rm 5f72a45b68c8` to remove it. Then try the `docker run` command above again.
-
-* `empty reply from server` or `Failed to connect to ::1: Cannot assign requested address` tend to indicate either that you haven't given payara enough time to start, or your docker setup is in an inconsistent state and should probably be restarted.
-
-* For manually fiddling around with the created dataverse, use user `dataverseAdmin` with password `admin1`.
diff --git a/conf/docker-aio/run-test-suite.sh b/conf/docker-aio/run-test-suite.sh
deleted file mode 100755
index 39809a7a50e..00000000000
--- a/conf/docker-aio/run-test-suite.sh
+++ /dev/null
@@ -1,13 +0,0 @@
-#!/bin/bash
-# This is the canonical list of which "IT" tests are expected to pass.
-
-dvurl=$1
-if [ -z "$dvurl" ]; then
-	dvurl="http://localhost:8084"
-fi
-
-integrationtests=$(<tests/integration-tests.txt)
-
-# Please note the "dataverse.test.baseurl" is set to run for "all-in-one" Docker environment.
-# TODO: Rather than hard-coding the list of "IT" classes here, add a profile to pom.xml.
-mvn test -Dtest=$integrationtests -Ddataverse.test.baseurl=$dvurl
diff --git a/conf/docker-aio/setupIT.bash b/conf/docker-aio/setupIT.bash
deleted file mode 100755
index 528b8f3c5f8..00000000000
--- a/conf/docker-aio/setupIT.bash
+++ /dev/null
@@ -1,13 +0,0 @@
-#!/usr/bin/env bash
-
-# do integration-test install and test data setup
-
-cd /opt/dv
-unzip dvinstall.zip
-cd /opt/dv/testdata
-./scripts/deploy/phoenix.dataverse.org/prep
-./db.sh
-./install # modified from phoenix
-/usr/local/glassfish4/glassfish/bin/asadmin deploy /opt/dv/dvinstall/dataverse.war
-./post # modified from phoenix
-
diff --git a/conf/docker-aio/seturl.bash b/conf/docker-aio/seturl.bash
deleted file mode 100755
index a62fb6b3ea7..00000000000
--- a/conf/docker-aio/seturl.bash
+++ /dev/null
@@ -1,3 +0,0 @@
-#!/usr/bin/env bash
-
-docker exec dv /usr/local/glassfish4/bin/asadmin create-jvm-options "\"-Ddataverse.siteUrl=http\://localhost\:8084\""
diff --git a/conf/docker-aio/testdata/httpd.conf b/conf/docker-aio/testdata/httpd.conf
deleted file mode 100644
index 85c851d785f..00000000000
--- a/conf/docker-aio/testdata/httpd.conf
+++ /dev/null
@@ -1,27 +0,0 @@
-
-Include conf.d/*.conf
-Include conf.modules.d/*.conf
-ServerName localhost
-Listen 80 443
-PidFile run/httpd.pid
-DocumentRoot "/var/www/html"
-TypesConfig /etc/mime.types
-User apache
-Group apache
-
-<VirtualHost *:80>
-  ServerName localhost
-  LogLevel debug
- ErrorLog logs/error_log
- LogFormat "%h %l %u %t \"%r\" %>s %b \"%{Referer}i\" \"%{User-Agent}i\"" combined
- CustomLog logs/access_log combined
- 
-  # proxy config (aka - what to send to glassfish or not)
-  ProxyPassMatch ^/Shibboleth.sso !
-  ProxyPassMatch ^/shibboleth-ds !
-  # pass everything else to Glassfish
-  ProxyPass / ajp://localhost:8009/
-# glassfish can be slow sometimes
-  ProxyTimeout 300 
-
-</VirtualHost>
diff --git a/conf/docker-aio/testscripts/db.sh b/conf/docker-aio/testscripts/db.sh
deleted file mode 100755
index f0a9e409fd7..00000000000
--- a/conf/docker-aio/testscripts/db.sh
+++ /dev/null
@@ -1,3 +0,0 @@
-#!/bin/sh
-psql -U postgres -c "CREATE ROLE dvnapp PASSWORD 'secret' SUPERUSER CREATEDB CREATEROLE INHERIT LOGIN" template1
-psql -U dvnapp -c 'CREATE DATABASE "dvndb" WITH OWNER = "dvnapp"' template1
diff --git a/conf/docker-aio/testscripts/install b/conf/docker-aio/testscripts/install
deleted file mode 100755
index f87f180b554..00000000000
--- a/conf/docker-aio/testscripts/install
+++ /dev/null
@@ -1,21 +0,0 @@
-#!/bin/sh
-export HOST_ADDRESS=localhost
-export GLASSFISH_ROOT=/opt/payara5
-export FILES_DIR=/opt/payara5/glassfish/domains/domain1/files
-export DB_NAME=dvndb
-export DB_PORT=5432
-export DB_HOST=localhost
-export DB_USER=dvnapp
-export DB_PASS=secret
-export RSERVE_HOST=localhost
-export RSERVE_PORT=6311
-export RSERVE_USER=rserve
-export RSERVE_PASS=rserve
-export SMTP_SERVER=localhost
-export MEM_HEAP_SIZE=2048
-export GLASSFISH_DOMAIN=domain1
-cd scripts/installer
-#cp ../../conf/jhove/jhove.conf $GLASSFISH_ROOT/glassfish/domains/$GLASSFISH_DOMAIN/config/jhove.conf
-cp /opt/dv/testdata/jhove.conf $GLASSFISH_ROOT/glassfish/domains/$GLASSFISH_DOMAIN/config/jhove.conf
-cp /opt/dv/testdata/jhoveConfig.xsd $GLASSFISH_ROOT/glassfish/domains/$GLASSFISH_DOMAIN/config/jhoveConfig.xsd
-./as-setup.sh dvndb
diff --git a/conf/docker-aio/testscripts/post b/conf/docker-aio/testscripts/post
deleted file mode 100755
index 0f292109d31..00000000000
--- a/conf/docker-aio/testscripts/post
+++ /dev/null
@@ -1,13 +0,0 @@
-#/bin/sh
-cd scripts/api
-./setup-all.sh --insecure -p=admin1 | tee /tmp/setup-all.sh.out
-cd ../..
-psql -U dvnapp dvndb -f doc/sphinx-guides/source/_static/util/createsequence.sql
-scripts/search/tests/publish-dataverse-root
-#git checkout scripts/api/data/dv-root.json
-scripts/search/tests/grant-authusers-add-on-root
-scripts/search/populate-users
-scripts/search/create-users
-scripts/search/tests/create-all-and-test
-scripts/search/tests/publish-spruce1-and-test
-#java -jar downloads/schemaSpy_5.0.0.jar -t pgsql -host localhost -db dvndb -u postgres -p secret -s public -dp scripts/installer/pgdriver/postgresql-9.1-902.jdbc4.jar -o /var/www/html/schemaspy/latest
diff --git a/conf/docker-dcm/.gitignore b/conf/docker-dcm/.gitignore
deleted file mode 100644
index ac39981ce6a..00000000000
--- a/conf/docker-dcm/.gitignore
+++ /dev/null
@@ -1,2 +0,0 @@
-*.rpm
-upload*.bash
diff --git a/conf/docker-dcm/0prep.sh b/conf/docker-dcm/0prep.sh
deleted file mode 100755
index 300aa39d567..00000000000
--- a/conf/docker-dcm/0prep.sh
+++ /dev/null
@@ -1,11 +0,0 @@
-#!/bin/sh
-DCM_VERSION=0.5
-RSAL_VERSION=0.1
-
-if [ ! -e dcm-${DCM_VERSION}-0.noarch.rpm ]; then
-	wget https://github.com/sbgrid/data-capture-module/releases/download/${DCM_VERSION}/dcm-${DCM_VERSION}-0.noarch.rpm
-fi
-
-if [ ! -e rsal-${RSAL_VERSION}-0.noarch.rpm ] ;then
-	wget https://github.com/sbgrid/rsal/releases/download/${RSAL_VERSION}/rsal-${RSAL_VERSION}-0.noarch.rpm
-fi
diff --git a/conf/docker-dcm/c6client.dockerfile b/conf/docker-dcm/c6client.dockerfile
deleted file mode 100644
index e4d1ae7da82..00000000000
--- a/conf/docker-dcm/c6client.dockerfile
+++ /dev/null
@@ -1,7 +0,0 @@
-# build from repo root
-FROM centos:6
-RUN yum install -y epel-release
-RUN yum install -y rsync openssh-clients jq curl wget lynx
-RUN useradd depositor
-USER depositor
-WORKDIR /home/depositor
diff --git a/conf/docker-dcm/cfg/dcm/bashrc b/conf/docker-dcm/cfg/dcm/bashrc
deleted file mode 100644
index 07137ab8471..00000000000
--- a/conf/docker-dcm/cfg/dcm/bashrc
+++ /dev/null
@@ -1,18 +0,0 @@
-# .bashrc
-
-# User specific aliases and functions
-
-alias rm='rm -i'
-alias cp='cp -i'
-alias mv='mv -i'
-
-# Source global definitions
-if [ -f /etc/bashrc ]; then
-	. /etc/bashrc
-fi
-
-# these are dummy values, obviously
-export UPLOADHOST=dcmsrv
-export DVAPIKEY=burrito
-export DVHOSTINT=dvsrv
-export DVHOST=dvsrv
diff --git a/conf/docker-dcm/cfg/dcm/entrypoint-dcm.sh b/conf/docker-dcm/cfg/dcm/entrypoint-dcm.sh
deleted file mode 100755
index 0db674bfac4..00000000000
--- a/conf/docker-dcm/cfg/dcm/entrypoint-dcm.sh
+++ /dev/null
@@ -1,6 +0,0 @@
-#!/bin/sh
-
-/etc/init.d/sshd start
-/etc/init.d/redis start
-/etc/init.d/rq start
-lighttpd -D -f /etc/lighttpd/lighttpd.conf
diff --git a/conf/docker-dcm/cfg/dcm/healthcheck-dcm.sh b/conf/docker-dcm/cfg/dcm/healthcheck-dcm.sh
deleted file mode 100755
index 3964a79391e..00000000000
--- a/conf/docker-dcm/cfg/dcm/healthcheck-dcm.sh
+++ /dev/null
@@ -1,14 +0,0 @@
-#!/bin/sh
-
-r_rq=`/etc/init.d/rq status`
-if [ "rq_worker running" != "$r_rq" ]; then
-	echo "rq failed"
-	exit 1
-fi
-r_www=`/etc/init.d/lighttpd status`
-e_www=$?
-if [ 0 -ne $e_www ]; then
-	echo "lighttpd failed"
-	exit 2
-fi
-
diff --git a/conf/docker-dcm/cfg/dcm/rq-init-d b/conf/docker-dcm/cfg/dcm/rq-init-d
deleted file mode 100755
index 093cd894376..00000000000
--- a/conf/docker-dcm/cfg/dcm/rq-init-d
+++ /dev/null
@@ -1,57 +0,0 @@
-#!/bin/bash
-
-# chkconfig: 2345 90 60
-# description: rq worker script (single worker process)
-
-# example rq configuration file (to be placed in /etc/init.d)
-
-# works on cent6
-
-DAEMON=rq_worker
-DAEMON_PATH=/opt/dcm/gen/
-export UPLOADHOST=dcmsrv
-VIRTUALENV=
-LOGFILE=/var/log/${DAEMON}.log
-PIDFILE=/var/run/${DAEMON}.pid
-
-case "$1" in
-start)
-	printf "%-50s" "starting $DAEMON..."
-	cd $DAEMON_PATH
-	if [ ! -z "$VIRTUALENV" ]; then
-		source $VIRTUALENV/bin/activate
-	fi
-	rq worker normal --pid $PIDFILE > ${LOGFILE} 2>&1 &
-;;
-status)
-	if [ -f $PIDFILE ]; then
-		PID=`cat $PIDFILE`
-		if [ -z "`ps axf | grep ${PID} | grep -v grep`" ]; then
-			printf "%s\n" "$DAEMON not running, but PID file ($PIDFILE) exists"
-		else
-			echo "$DAEMON running"
-		fi
-	else
-		printf "%s\n" "$DAEMON not running"
-	fi
-;;
-stop)
-	printf "%-50s" "stopping $DAEMON"
-	if [ -f $PIDFILE ]; then
-		PID=`cat $PIDFILE`
-		kill -HUP $PID
-		rm -f $PIDFILE
-	else
-		printf "%s\n" "no PID file ($PIDFILE) - maybe not running"
-	fi
-;;
-restart)
-	$0 stop
-	$0 start
-;;
-
-*)
-	echo "Usage: $0 {status|start|stop|restart}"
-	exit 1
-esac
-
diff --git a/conf/docker-dcm/cfg/dcm/test_install.sh b/conf/docker-dcm/cfg/dcm/test_install.sh
deleted file mode 100755
index 3026ceb9fa5..00000000000
--- a/conf/docker-dcm/cfg/dcm/test_install.sh
+++ /dev/null
@@ -1,7 +0,0 @@
-#!/bin/sh
-
-cp /etc/dcm/rq-init-d /etc/init.d/rq
-cp /etc/dcm/lighttpd-conf-dcm /etc/lighttpd/lighttpd.conf
-cp /etc/dcm/lighttpd-modules-dcm /etc/lighttpd/modules.conf
-cp /etc/dcm/dcm-rssh.conf /etc/rssh.conf
-
diff --git a/conf/docker-dcm/cfg/rsal/entrypoint-rsal.sh b/conf/docker-dcm/cfg/rsal/entrypoint-rsal.sh
deleted file mode 100755
index 92466c3bd4b..00000000000
--- a/conf/docker-dcm/cfg/rsal/entrypoint-rsal.sh
+++ /dev/null
@@ -1,5 +0,0 @@
-#!/bin/sh
-
-#/usr/bin/rsync --no-detach --daemon --config /etc/rsyncd.conf
-/usr/bin/rsync --daemon --config /etc/rsyncd.conf
-lighttpd -D -f /etc/lighttpd/lighttpd.conf
diff --git a/conf/docker-dcm/cfg/rsal/lighttpd-modules.conf b/conf/docker-dcm/cfg/rsal/lighttpd-modules.conf
deleted file mode 100644
index cdb1438af82..00000000000
--- a/conf/docker-dcm/cfg/rsal/lighttpd-modules.conf
+++ /dev/null
@@ -1,174 +0,0 @@
-#######################################################################
-##
-## ansible managed
-#
-##  Modules to load
-## -----------------
-##
-## at least mod_access and mod_accesslog should be loaded
-## all other module should only be loaded if really neccesary
-##
-## - saves some time
-## - saves memory
-##
-## the default module set contains:
-##
-## "mod_indexfile", "mod_dirlisting", "mod_staticfile"
-##
-## you dont have to include those modules in your list
-##
-## Modules, which are pulled in via conf.d/*.conf
-##
-## NOTE: the order of modules is important.
-##
-## - mod_accesslog     -> conf.d/access_log.conf
-## - mod_compress      -> conf.d/compress.conf
-## - mod_status        -> conf.d/status.conf
-## - mod_webdav        -> conf.d/webdav.conf
-## - mod_cml           -> conf.d/cml.conf
-## - mod_evhost        -> conf.d/evhost.conf
-## - mod_simple_vhost  -> conf.d/simple_vhost.conf
-## - mod_mysql_vhost   -> conf.d/mysql_vhost.conf
-## - mod_trigger_b4_dl -> conf.d/trigger_b4_dl.conf
-## - mod_userdir       -> conf.d/userdir.conf
-## - mod_rrdtool       -> conf.d/rrdtool.conf
-## - mod_ssi           -> conf.d/ssi.conf
-## - mod_cgi           -> conf.d/cgi.conf
-## - mod_scgi          -> conf.d/scgi.conf
-## - mod_fastcgi       -> conf.d/fastcgi.conf
-## - mod_proxy         -> conf.d/proxy.conf
-## - mod_secdownload   -> conf.d/secdownload.conf
-## - mod_expire        -> conf.d/expire.conf
-##
-
-server.modules = (
-  "mod_access",
-#  "mod_alias",
-#  "mod_auth",
-#  "mod_evasive",
-#  "mod_redirect",
-#  "mod_rewrite",
-#  "mod_setenv",
-#  "mod_usertrack",
-)
-
-##
-#######################################################################
-
-#######################################################################
-##
-##  Config for various Modules
-##
-
-##
-## mod_ssi
-##
-#include "conf.d/ssi.conf"
-
-##
-## mod_status
-##
-#include "conf.d/status.conf"
-
-##
-## mod_webdav
-##
-#include "conf.d/webdav.conf"
-
-##
-## mod_compress
-##
-#include "conf.d/compress.conf"
-
-##
-## mod_userdir
-##
-#include "conf.d/userdir.conf"
-
-##
-## mod_magnet
-##
-#include "conf.d/magnet.conf"
-
-##
-## mod_cml
-##
-#include "conf.d/cml.conf"
-
-##
-## mod_rrdtool
-##
-#include "conf.d/rrdtool.conf"
-
-##
-## mod_proxy
-##
-#include "conf.d/proxy.conf"
-
-##
-## mod_expire
-##
-#include "conf.d/expire.conf"
-
-##
-## mod_secdownload
-##
-#include "conf.d/secdownload.conf"
-
-##
-#######################################################################
-
-#######################################################################
-##
-## CGI modules
-##
-
-##
-## SCGI (mod_scgi)
-##
-#include "conf.d/scgi.conf"
-
-##
-## FastCGI (mod_fastcgi)
-##
-#include "conf.d/fastcgi.conf"
-
-##
-## plain old CGI (mod_cgi)
-##
-include "conf.d/cgi.conf"
-
-##
-#######################################################################
-
-#######################################################################
-##
-## VHost Modules
-##
-##  Only load ONE of them!
-## ========================
-##
-
-##
-## You can use conditionals for vhosts aswell.
-## 
-## see http://www.lighttpd.net/documentation/configuration.html
-##
-
-##
-## mod_evhost
-##
-#include "conf.d/evhost.conf"
-
-##
-## mod_simple_vhost
-##
-#include "conf.d/simple_vhost.conf"
-
-##
-## mod_mysql_vhost
-##
-#include "conf.d/mysql_vhost.conf"
-
-##
-#######################################################################
diff --git a/conf/docker-dcm/cfg/rsal/lighttpd.conf b/conf/docker-dcm/cfg/rsal/lighttpd.conf
deleted file mode 100644
index 5874d60eb48..00000000000
--- a/conf/docker-dcm/cfg/rsal/lighttpd.conf
+++ /dev/null
@@ -1,43 +0,0 @@
-## lighttpd configuration customized for RSAL; centos7
-
-# refuse connections not from frontend or localhost
-# DO NOT HAVE THIS OPEN TO THE WORLD!!!
-#$HTTP["remoteip"] !~ "192.168.2.2|127.0.0.1" {
-#url.access-deny = ("")
-#}
-server.breakagelog = "/var/log/lighttpd/breakage.log"
-
-#######################################################################
-##
-## Some Variable definition which will make chrooting easier.
-##
-## if you add a variable here. Add the corresponding variable in the
-## chroot example aswell.
-##
-var.log_root    = "/var/log/lighttpd"
-var.server_root = "/opt/rsal/api"
-var.state_dir   = "/var/run"
-var.home_dir    = "/var/lib/lighttpd"
-var.conf_dir    = "/etc/lighttpd"
-
-var.cache_dir   = "/var/cache/lighttpd"
-var.socket_dir  = home_dir + "/sockets"
-include "modules.conf"
-server.port = 80
-server.use-ipv6 = "disable"
-server.username  = "lighttpd"
-server.groupname = "lighttpd"
-server.document-root = server_root 
-server.pid-file = state_dir + "/lighttpd.pid"
-server.errorlog             = log_root + "/error.log"
-include "conf.d/access_log.conf"
-include "conf.d/debug.conf"
-server.event-handler = "linux-sysepoll"
-server.network-backend = "linux-sendfile"
-server.stat-cache-engine = "simple"
-server.max-connections = 1024
-static-file.exclude-extensions = ( ".php", ".pl", ".fcgi", ".scgi" )
-include "conf.d/mime.conf"
-include "conf.d/dirlisting.conf"
-server.follow-symlink = "enable"
-server.upload-dirs = ( "/var/tmp" )
diff --git a/conf/docker-dcm/cfg/rsal/rsyncd.conf b/conf/docker-dcm/cfg/rsal/rsyncd.conf
deleted file mode 100644
index 5a15ab28a12..00000000000
--- a/conf/docker-dcm/cfg/rsal/rsyncd.conf
+++ /dev/null
@@ -1,8 +0,0 @@
-lock file=/var/run/rsync.lock
-log file=/var/log/rsyncd.log
-pid file=/var/log/rsyncd.pid
-
-[10.5072]
- path=/public/
- read only=yes
-
diff --git a/conf/docker-dcm/configure_dcm.sh b/conf/docker-dcm/configure_dcm.sh
deleted file mode 100755
index 5b65b0a0314..00000000000
--- a/conf/docker-dcm/configure_dcm.sh
+++ /dev/null
@@ -1,26 +0,0 @@
-#!/bin/sh
-
-echo "dcm configs on dv side to be done"
-
-# in homage to dataverse traditions, reset to insecure "burrito" admin API key
-sudo -u postgres psql -c "update apitoken set tokenstring='burrito' where id=1;" dvndb
-sudo -u postgres psql -c "update authenticateduser set superuser='t' where id=1;" dvndb
-
-# dataverse configs for DCM
-curl -X PUT -d "SHA-1" "http://localhost:8080/api/admin/settings/:FileFixityChecksumAlgorithm"
-curl -X PUT "http://localhost:8080/api/admin/settings/:UploadMethods" -d "dcm/rsync+ssh"
-curl -X PUT "http://localhost:8080/api/admin/settings/:DataCaptureModuleUrl" -d "http://dcmsrv"
-
-# configure for RSAL downloads; but no workflows or RSAL yet
-curl -X PUT "http://localhost:8080/api/admin/settings/:DownloadMethods" -d "rsal/rsync"
-
-# publish root dataverse
-curl -X POST -H "X-Dataverse-key: burrito" "http://localhost:8080/api/dataverses/root/actions/:publish"
-
-# symlink `hold` volume 
-mkdir -p /usr/local/glassfish4/glassfish/domains/domain1/files/
-ln -s /hold /usr/local/glassfish4/glassfish/domains/domain1/files/10.5072
-
-# need to set siteUrl
-cd /usr/local/glassfish4
-bin/asadmin create-jvm-options "\"-Ddataverse.siteUrl=http\://localhost\:8084\""
diff --git a/conf/docker-dcm/configure_rsal.sh b/conf/docker-dcm/configure_rsal.sh
deleted file mode 100755
index 5db43a34381..00000000000
--- a/conf/docker-dcm/configure_rsal.sh
+++ /dev/null
@@ -1,21 +0,0 @@
-#!/bin/sh
-
-fn=rsal-workflow2.json
-# needs an actual IP (vs a hostname) for whitelist
-rsalip=`dig +short rsalsrv`
-
-# create workflow
-curl -s -X POST -H "Content-type: application/json" -d @${fn} "http://localhost:8080/api/admin/workflows" 
-
-# put rsal on the whitelist
-curl -X PUT -d "127.0.0.1;${rsalip}" "http://localhost:8080/api/admin/workflows/ip-whitelist"
-
-# set workflow as default
-curl -X PUT -d "1" "http://localhost:8080/api/admin/workflows/default/PrePublishDataset"
-
-# local access path
-curl -X PUT -d "/hpc/storage" "http://localhost:8080/api/admin/settings/:LocalDataAccessPath"
-
-# storage sites
-curl -X POST -H "Content-type: application/json" --upload-file site-primary.json "http://localhost:8080/api/admin/storageSites"
-curl -X POST -H "Content-type: application/json" --upload-file site-remote.json "http://localhost:8080/api/admin/storageSites"
diff --git a/conf/docker-dcm/create.bash b/conf/docker-dcm/create.bash
deleted file mode 100755
index 58ae6e61dc7..00000000000
--- a/conf/docker-dcm/create.bash
+++ /dev/null
@@ -1,22 +0,0 @@
-#!/usr/bin/env bash
-
-
-# user creates dataset
-k_d=burrito
-dv_d=root
-h=http://dvsrv
-
-fn=dataset.json
-#dset_id=`curl -s -H "X-Dataverse-key: $k_d" -X POST --upload-file $fn $h/api/dataverses/$dv_d/datasets | jq .data.id`
-r=`curl -s -H "X-Dataverse-key: $k_d" -X POST --upload-file $fn $h/api/dataverses/$dv_d/datasets`
-echo $r
-dset_id=`echo $r | jq .data.id`
-echo "dataset created with id: $dset_id"
-
-if [ "null" == "${dset_id}" ]; then
-	echo "error - no dataset id from create command"
-	exit 1
-fi
-echo "dataset created; internal/db id: ${dset_id}"
-
-
diff --git a/conf/docker-dcm/dataset.json b/conf/docker-dcm/dataset.json
deleted file mode 100644
index fb1b734ed40..00000000000
--- a/conf/docker-dcm/dataset.json
+++ /dev/null
@@ -1,126 +0,0 @@
-{
-    "datasetVersion": {
-      "metadataBlocks": {
-        "citation": {
-          "displayName": "Citation Metadata",
-          "fields": [
-            {
-              "typeName": "title",
-              "multiple": false,
-              "typeClass": "primitive",
-              "value": "DCM test dataset"
-            },
-            {
-              "typeName": "productionDate",
-              "multiple": false,
-              "typeClass": "primitive",
-              "value": "2017-04-01"
-            },
-            {
-              "typeName": "dsDescription",
-              "multiple": true,
-              "typeClass": "compound",
-              "value": [
-                {
-                  "dsDescriptionValue": {
-                    "typeName": "dsDescriptionValue",
-                    "multiple": false,
-                    "typeClass": "primitive",
-                    "value": "this would normally be a dataset large enough to require a DCM"
-                  }
-                }
-              ]
-            },
-            {
-              "typeName": "depositor",
-              "multiple": false,
-              "typeClass": "primitive",
-              "value": "Doc, Bob"
-            },
-            {
-              "typeName": "producer",
-              "multiple": true,
-              "typeClass": "compound",
-              "value": [
-                {
-                  "producerName": {
-                    "typeName": "producerName",
-                    "multiple": false,
-                    "typeClass": "primitive",
-                    "value": "Prof, Arthor"
-                  },
-                  "producerAffiliation": {
-                    "typeName": "producerAffiliation",
-                    "multiple": false,
-                    "typeClass": "primitive",
-                    "value": "LibraScholar"
-                  }
-                }
-              ]
-            },
-            {
-              "typeName": "author",
-              "multiple": true,
-              "typeClass": "compound",
-              "value": [
-                {
-                  "authorName": {
-                    "typeName": "authorName",
-                    "multiple": false,
-                    "typeClass": "primitive",
-                    "value": "Student, Carol"
-                  }
-                ,
-		  "authorAffiliation": {
-		   "typeName": "authorAffiliation",
-		   "multiple": false,
-		   "typeClass": "primitive",
-		   "value": "LibraScholar"
-		  }
-		},
-                {
-                  "authorName": {
-                    "typeName": "authorName",
-                    "multiple": false,
-                    "typeClass": "primitive",
-                    "value": "Doc, Bob"
-                  }
-                ,
-		  "authorAffiliation": {
-		   "typeName": "authorAffiliation",
-		   "multiple": false,
-		   "typeClass": "primitive",
-		   "value": "LibraScholar"
-		  }
-		}
-		
-              ]
-            },
-            {
-              "typeName": "datasetContact",
-              "multiple": true,
-              "typeClass": "compound",
-              "value": [
-                {
-                  "datasetContactEmail": {
-                    "typeName": "datasetContactEmail",
-                    "multiple": false,
-                    "typeClass": "primitive",
-                    "value": "dsContact@mailinator.com"
-                  }
-                }
-              ]
-            },
-            {
-              "typeName": "subject",
-              "multiple": true,
-              "typeClass": "controlledVocabulary",
-              "value": [
-                "Medicine, Health and Life Sciences"
-              ]
-            }
-          ]
-        }
-      }
-  }
-}
diff --git a/conf/docker-dcm/dcmsrv.dockerfile b/conf/docker-dcm/dcmsrv.dockerfile
deleted file mode 100644
index 9989fa3a89d..00000000000
--- a/conf/docker-dcm/dcmsrv.dockerfile
+++ /dev/null
@@ -1,21 +0,0 @@
-# build from repo root
-FROM centos:6
-RUN yum install -y epel-release
-ARG RPMFILE=dcm-0.5-0.noarch.rpm
-COPY ${RPMFILE} /tmp/
-COPY cfg/dcm/bashrc /root/.bashrc
-COPY cfg/dcm/test_install.sh /root/
-RUN yum localinstall -y /tmp/${RPMFILE}
-RUN pip install -r /opt/dcm/requirements.txt
-RUN pip install awscli==1.15.75
-run export PATH=~/.local/bin:$PATH
-RUN /root/test_install.sh
-COPY cfg/dcm/rq-init-d /etc/init.d/rq
-RUN useradd glassfish
-COPY cfg/dcm/entrypoint-dcm.sh /
-COPY cfg/dcm/healthcheck-dcm.sh /
-EXPOSE 80
-EXPOSE 22
-VOLUME /hold
-HEALTHCHECK CMD /healthcheck-dcm.sh
-CMD ["/entrypoint-dcm.sh"]
diff --git a/conf/docker-dcm/docker-compose.yml b/conf/docker-dcm/docker-compose.yml
deleted file mode 100644
index 49d4467d349..00000000000
--- a/conf/docker-dcm/docker-compose.yml
+++ /dev/null
@@ -1,50 +0,0 @@
-# initial docker-compose file for combined Dataverse and DCM with shared filesystem
-
-version: '3'
-
-services:
-  dcmsrv:
-    build:
-      context: .
-      dockerfile: dcmsrv.dockerfile
-    container_name: dcmsrv
-    volumes:
-      - hold:/hold
-  rsalsrv:
-    build:
-      context: .
-      dockerfile: rsalsrv.dockerfile
-    container_name: rsalsrv
-#    image: rsalrepo_rsal
-    volumes:
-      - hold:/hold
-      - ./:/mnt
-    environment:
-      DV_HOST: http://dvsrv:8080
-      DV_APIKEY: burrito
-    ports:
-      - "8889:80"
-      - "873:873"
-  dvsrv:
-    build:
-      context: .
-      dockerfile: dv0dcm.dockerfile
-    container_name: dvsrv
-    volumes:
-      - hold:/hold
-      - ./:/mnt
-    ports:
-      - "8083:8080"
-      - "8084:80"
-  client:
-    build:
-      context: .
-      dockerfile: c6client.dockerfile
-    command: sleep infinity
-    container_name: dcm_client
-    volumes:
-      - ./:/mnt
-
-volumes:
-  hold:
-
diff --git a/conf/docker-dcm/dv0dcm.dockerfile b/conf/docker-dcm/dv0dcm.dockerfile
deleted file mode 100644
index 021534c8978..00000000000
--- a/conf/docker-dcm/dv0dcm.dockerfile
+++ /dev/null
@@ -1,7 +0,0 @@
-# dv0 assumed to be image name for docker-aio
-FROM dv0
-RUN yum install -y bind-utils
-COPY configure_dcm.sh /opt/dv/
-COPY configure_rsal.sh /opt/dv/
-COPY rsal-workflow2.json site-primary.json site-remote.json /opt/dv/
-VOLUME /hold
diff --git a/conf/docker-dcm/get_transfer.bash b/conf/docker-dcm/get_transfer.bash
deleted file mode 100755
index 42080f536e1..00000000000
--- a/conf/docker-dcm/get_transfer.bash
+++ /dev/null
@@ -1,19 +0,0 @@
-#!/usr/bin/env bash
-
-# user gets transfer script
-
-dset_id=$1
-if [ -z "$dset_id" ]; then
-	echo "no dataset id specified, bailing out"
-	exit 1
-fi
-
-k_d=burrito
-dv_d=root
-
-h=http://dvsrv
-
-#get upload script from DCM
-wget --header "X-Dataverse-key: ${k_d}" ${h}/api/datasets/${dset_id}/dataCaptureModule/rsync -O upload-${dset_id}.bash
-
-
diff --git a/conf/docker-dcm/publish_major.bash b/conf/docker-dcm/publish_major.bash
deleted file mode 100755
index 6a3fd1288ca..00000000000
--- a/conf/docker-dcm/publish_major.bash
+++ /dev/null
@@ -1,17 +0,0 @@
-#!/usr/bin/env bash
-
-# publish dataset based on database id
-
-dset_id=$1
-if [ -z "$dset_id" ]; then
-	echo "no dataset id specified, bailing out"
-	exit 1
-fi
-
-k_d=burrito
-
-h=http://dvsrv
-
-curl -X POST -H "X-Dataverse-key: ${k_d}" "${h}/api/datasets/${dset_id}/actions/:publish?type=major"
-
-
diff --git a/conf/docker-dcm/readme.md b/conf/docker-dcm/readme.md
deleted file mode 100644
index 3e6a15e61d6..00000000000
--- a/conf/docker-dcm/readme.md
+++ /dev/null
@@ -1,26 +0,0 @@
-This docker-compose setup is intended for use in development, small scale evaluation, and potentially serve as an example of a working (although not production security level) configuration.
-
-Setup:
-
-- build docker-aio image with name dv0 as described in `../docker-aio` (don't start up the docker image or run setupIT.bash)
-- work in the `conf/docker-dcm` directory for below commands
-- download/prepare dependencies: `./0prep.sh`
-- build dcm/dv0dcm images with docker-compose: `docker-compose -f docker-compose.yml build`
-- start containers: `docker-compose -f docker-compose.yml up -d`
-- wait for container to show "healthy" (aka - `docker ps`), then run dataverse app installation: `docker exec dvsrv /opt/dv/install.bash`
-- for development, you probably want to use the `FAKE` DOI provider: `docker exec -it dvsrv /opt/dv/configure_doi.bash`
-- configure dataverse application to use DCM: `docker exec -it dvsrv /opt/dv/configure_dcm.sh`
-- configure dataverse application to use RSAL (if desired): `docker exec -it dvsrv /opt/dv/configure_rsal.sh`
-
-Operation:
-The dataverse installation is accessible at `http://localhost:8084`.
-The `dcm_client` container is intended to be used for executing transfer scripts, and `conf/docker-dcm` is available at `/mnt` inside the container; this container can be accessed with `docker exec -it dcm_client bash`.
-The DCM cron job is NOT configured here; for development purposes the DCM checks can be run manually with `docker exec -it dcmsrv /opt/dcm/scn/post_upload.bash`.
-The RSAL cron job is similarly NOT configured; for development purposes `docker exec -it rsalsrv /opt/rsal/scn/pub.py` can be run manually.
-
-
-Cleanup:
-- shutdown/cleanup `docker-compose -f docker-compose.yml down -v`
-
-For reference, this configuration was working with docker 17.09 / docker-compose 1.16.
-
diff --git a/conf/docker-dcm/rsal-workflow2.json b/conf/docker-dcm/rsal-workflow2.json
deleted file mode 100644
index 322d3ecbcf7..00000000000
--- a/conf/docker-dcm/rsal-workflow2.json
+++ /dev/null
@@ -1,31 +0,0 @@
-{
-    "name": "RSAL file move for publication",
-    "steps": [
-        {
-            "provider":":internal",
-            "stepType":"log",
-            "parameters": {
-                "message": "Pre-http request"
-            }
-        },
-        {
-            "provider":":internal",
-            "stepType":"http/sr",
-            "parameters": {
-                "url":"http://rsalsrv/rr.py",
-                "method":"POST",
-                "contentType":"text/plain",
-                "body":"${invocationId}\ndataset.id=${dataset.id}\ndataset.identifier=${dataset.identifier}\ndataset.globalId=${dataset.globalId}",
-                "expectedResponse":"OK.*",
-                "rollbackMethod":"DELETE"
-            }
-        },
-        {
-            "provider":":internal",
-            "stepType":"log",
-            "parameters": {
-                "message": "Post-http request"
-            }
-        }
-    ]
-}
diff --git a/conf/docker-dcm/rsalsrv.dockerfile b/conf/docker-dcm/rsalsrv.dockerfile
deleted file mode 100644
index 844432afe6b..00000000000
--- a/conf/docker-dcm/rsalsrv.dockerfile
+++ /dev/null
@@ -1,20 +0,0 @@
-FROM centos:7
-ARG RPMFILE=rsal-0.1-0.noarch.rpm
-RUN yum update; yum install -y epel-release 
-COPY ${RPMFILE} /tmp/
-RUN yum localinstall -y /tmp/${RPMFILE}
-COPY cfg/rsal/rsyncd.conf /etc/rsyncd.conf
-COPY cfg/rsal/entrypoint-rsal.sh /entrypoint.sh
-COPY cfg/rsal/lighttpd-modules.conf /etc/lighttpd/modules.conf
-COPY cfg/rsal/lighttpd.conf /etc/lighttpd/lighttpd.conf
-RUN mkdir -p /public/FK2 
-RUN pip2 install -r /opt/rsal/scn/requirements.txt
-#COPY doc/testdata/ /hold/
-ARG DV_HOST=http://dv_srv:8080
-ARG DV_API_KEY=burrito
-ENV DV_HOST ${DV_HOST}
-ENV DV_API_KEY ${DV_API_KEY}
-EXPOSE 873
-EXPOSE 80
-HEALTHCHECK CMD curl --fail http://localhost/hw.py || exit 1
-CMD ["/entrypoint.sh"]
diff --git a/conf/docker-dcm/site-primary.json b/conf/docker-dcm/site-primary.json
deleted file mode 100644
index 35b217edffd..00000000000
--- a/conf/docker-dcm/site-primary.json
+++ /dev/null
@@ -1,6 +0,0 @@
-{
-	"hostname": "rsalsrv",
-	"name": "LibraScholar University",
-	"primaryStorage": true,
-	"transferProtocols": "rsync,posix"
-}
diff --git a/conf/docker-dcm/site-remote.json b/conf/docker-dcm/site-remote.json
deleted file mode 100644
index d47c3ef4dda..00000000000
--- a/conf/docker-dcm/site-remote.json
+++ /dev/null
@@ -1,6 +0,0 @@
-{
-	"hostname": "remote.libra.research",
-	"name": "LibraResearch Institute",
-	"primaryStorage": false,
-	"transferProtocols": "rsync"
-}
diff --git a/conf/jhove/jhove.conf b/conf/jhove/jhove.conf
index 5134ae0f81a..971c60acfaa 100644
--- a/conf/jhove/jhove.conf
+++ b/conf/jhove/jhove.conf
@@ -3,7 +3,7 @@
  xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
  xmlns="http://hul.harvard.edu/ois/xml/ns/jhove/jhoveConfig"
  xsi:schemaLocation="http://hul.harvard.edu/ois/xml/ns/jhove/jhoveConfig
-                     file:///usr/local/payara5/glassfish/domains/domain1/config/jhoveConfig.xsd">
+                     file:///usr/local/payara6/glassfish/domains/domain1/config/jhoveConfig.xsd">
  <jhoveHome>/usr/local/src/jhove</jhoveHome>
  <defaultEncoding>utf-8</defaultEncoding>
  <tempDirectory>/tmp</tempDirectory>
diff --git a/conf/solr/8.11.1/readme.md b/conf/solr/8.11.1/readme.md
deleted file mode 100644
index 4457cf9a7df..00000000000
--- a/conf/solr/8.11.1/readme.md
+++ /dev/null
@@ -1 +0,0 @@
-Please see the dev guide for what to do with Solr config files.
\ No newline at end of file
diff --git a/conf/solr/8.11.1/schema.xml b/conf/solr/9.3.0/schema.xml
similarity index 96%
rename from conf/solr/8.11.1/schema.xml
rename to conf/solr/9.3.0/schema.xml
index ceff082f418..3c15b659c4e 100644
--- a/conf/solr/8.11.1/schema.xml
+++ b/conf/solr/9.3.0/schema.xml
@@ -23,7 +23,7 @@
 
 
  For more information, on how to customize this file, please see
- http://lucene.apache.org/solr/guide/documents-fields-and-schema-design.html
+ https://solr.apache.org/guide/solr/latest/indexing-guide/schema-elements.html
 
  PERFORMANCE NOTE: this schema includes many optional features and should not
  be used for benchmarking.  To improve performance one could
@@ -38,7 +38,7 @@
     catchall "text" field, and use that for searching.
 -->
 
-<schema name="default-config" version="1.7">
+<schema name="default-config" version="1.6">
     <!-- attribute "name" is the name of this schema and is only used for display purposes.
        version="x.y" is Solr's version number for the schema syntax and 
        semantics.  It should not normally be changed by applications.
@@ -129,15 +129,8 @@
     <!-- catchall text field that indexes tokens both normally and in reverse for efficient
         leading wildcard queries. -->
     <field name="text_rev" type="text_general_rev" indexed="true" stored="false" multiValued="true"/>    
-    <field name="name" type="text_en" indexed="true" stored="true"/> 
-
-
-
-
-
-
-
-
+    <field name="name" type="text_en" indexed="true" stored="true"/>
+    
     <field name="definitionPointDocId" type="string" stored="true" indexed="true" multiValued="false"/>
     <field name="definitionPointDvObjectId" type="string" stored="true" indexed="true" multiValued="false"/>
     <field name="discoverableBy" type="string" stored="true" indexed="true" multiValued="true"/>
@@ -163,7 +156,7 @@
 
     <field name="publicationStatus" type="string" stored="true" indexed="true" multiValued="true"/>
     <field name="externalStatus" type="string" stored="true" indexed="true" multiValued="false"/>
-    <field name="embargoEndDate" type="long" stored="true" indexed="true" multiValued="false"/>
+    <field name="embargoEndDate" type="plong" stored="true" indexed="true" multiValued="false"/>
     
     <field name="subtreePaths" type="string" stored="true" indexed="true" multiValued="true"/>
 
@@ -200,7 +193,7 @@
     <field name="identifier" type="string" stored="true" indexed="true" multiValued="false"/>
     <field name="persistentUrl" type="string" stored="true" indexed="false" multiValued="false"/>
     <field name="unf" type="string" stored="true" indexed="true" multiValued="false"/>
-    <field name="fileSizeInBytes" type="long" stored="true" indexed="true" multiValued="false"/>
+    <field name="fileSizeInBytes" type="plong" stored="true" indexed="true" multiValued="false"/>
     <field name="fileMd5" type="string" stored="true" indexed="true" multiValued="false"/>
     <field name="fileChecksumType" type="string" stored="true" indexed="true" multiValued="false"/>
     <field name="fileChecksumValue" type="string" stored="true" indexed="true" multiValued="false"/>
@@ -208,9 +201,9 @@
     <field name="deaccessionReason" type="string" stored="true" indexed="false" multiValued="false"/>
 
     <!-- Added for Dataverse 4.0 alpha 1. This is a required field so we don't have to go to the database to get the database id of the entity. On cards we use the id in links -->
-    <field name="entityId" type="long" stored="true" indexed="true" multiValued="false"/>
+    <field name="entityId" type="plong" stored="true" indexed="true" multiValued="false"/>
 
-    <field name="datasetVersionId" type="long" stored="true" indexed="true" multiValued="false"/>
+    <field name="datasetVersionId" type="plong" stored="true" indexed="true" multiValued="false"/>
 
     <!-- Added for Dataverse 4.0 alpha 1 to sort by name  -->
     <!-- https://redmine.hmdc.harvard.edu/issues/3482 -->
@@ -218,10 +211,10 @@
     <!-- http://stackoverflow.com/questions/13360706/solr-4-0-alphabetical-sorting-trouble/13361226#13361226 -->
     <field name="nameSort" type="alphaOnlySort" indexed="true" stored="true"/>
 
-    <field name="dateSort" type="date" indexed="true" stored="true"/>
+    <field name="dateSort" type="pdate" indexed="true" stored="true"/>
 
     <!-- Added for Dataverse 4.0: release date https://redmine.hmdc.harvard.edu/issues/3592 -->
-    <field name="releasedate" type="int" indexed="true" stored="true"/>
+    <field name="releasedate" type="pint" indexed="true" stored="true"/>
 
     <!-- Added for Dataverse 4.0: do we want a description field that applies to dataverses, datasets, and files? https://redmine.hmdc.harvard.edu/issues/3745 -->
     <field name="description" type="text_en" multiValued="false" stored="true" indexed="true"/>
@@ -658,7 +651,8 @@
     <!-- Dynamic field definitions allow using convention over configuration
        for fields via the specification of patterns to match field names.
        EXAMPLE:  name="*_i" will match any field ending in _i (like myid_i, z_i)
-       RESTRICTION: the glob-like pattern in the name attribute must have a "*" only at the start or the end.  -->
+       RESTRICTION: the glob-like pattern in the name attribute must have a "*"
+       only at the start or the end.  -->
    
     <dynamicField name="*_i"  type="pint"    indexed="true"  stored="true"/>
     <dynamicField name="*_is" type="pints"    indexed="true"  stored="true"/>
@@ -666,19 +660,23 @@
     <dynamicField name="*_ss" type="strings"  indexed="true"  stored="true"/>
     <dynamicField name="*_l"  type="plong"   indexed="true"  stored="true"/>
     <dynamicField name="*_ls" type="plongs"   indexed="true"  stored="true"/>
-    <dynamicField name="*_txt" type="text_general" indexed="true" stored="true"/>
     <dynamicField name="*_b"  type="boolean" indexed="true" stored="true"/>
     <dynamicField name="*_bs" type="booleans" indexed="true" stored="true"/>
     <dynamicField name="*_f"  type="pfloat"  indexed="true"  stored="true"/>
     <dynamicField name="*_fs" type="pfloats"  indexed="true"  stored="true"/>
     <dynamicField name="*_d"  type="pdouble" indexed="true"  stored="true"/>
     <dynamicField name="*_ds" type="pdoubles" indexed="true"  stored="true"/>
+    <dynamicField name="*_dt"  type="pdate"    indexed="true"  stored="true"/>
+    <dynamicField name="*_dts" type="pdates"   indexed="true"  stored="true"/>
+    <dynamicField name="*_t"   type="text_general" indexed="true" stored="true" multiValued="false"/>
+    <dynamicField name="*_txt" type="text_general" indexed="true" stored="true"/>
+    
+    <dynamicField name="random_*" type="random"/>
+    <dynamicField name="ignored_*" type="ignored"/>
 
     <!-- Type used for data-driven schema, to add a string copy for each text field -->
-    <dynamicField name="*_str" type="strings" stored="false" docValues="true" indexed="false" />
-
-    <dynamicField name="*_dt"  type="pdate"    indexed="true"  stored="true"/>
-    <dynamicField name="*_dts" type="pdate"    indexed="true"  stored="true" multiValued="true"/>
+    <dynamicField name="*_str" type="strings" stored="false" docValues="true" indexed="false" useDocValuesAsStored="false" />
+    
     <dynamicField name="*_p"  type="location" indexed="true" stored="true"/>
     <dynamicField name="*_srpt"  type="location_rpt" indexed="true" stored="true"/>
     
@@ -724,43 +722,6 @@
          field first in an ascending sort and last in a descending sort.
     -->
 
-<fieldType name="int" class="solr.TrieIntField" precisionStep="0" positionIncrementGap="0"/>
-<fieldType name="float" class="solr.TrieFloatField" precisionStep="0" positionIncrementGap="0"/>
-<fieldType name="long" class="solr.TrieLongField" precisionStep="0" positionIncrementGap="0"/>
-<fieldType name="double" class="solr.TrieDoubleField" precisionStep="0" positionIncrementGap="0"/>
-
-<fieldType name="tint" class="solr.TrieIntField" precisionStep="8" positionIncrementGap="0"/>
-<fieldType name="tfloat" class="solr.TrieFloatField" precisionStep="8" positionIncrementGap="0"/>
-<fieldType name="tlong" class="solr.TrieLongField" precisionStep="8" positionIncrementGap="0"/>
-<fieldType name="tdouble" class="solr.TrieDoubleField" precisionStep="8" positionIncrementGap="0"/>
-
-<!-- The format for this date field is of the form 1995-12-31T23:59:59Z, and
-        is a more restricted form of the canonical representation of dateTime
-        http://www.w3.org/TR/xmlschema-2/#dateTime    
-        The trailing "Z" designates UTC time and is mandatory.
-        Optional fractional seconds are allowed: 1995-12-31T23:59:59.999Z
-        All other components are mandatory.
-
-        Expressions can also be used to denote calculations that should be
-        performed relative to "NOW" to determine the value, ie...
-
-            NOW/HOUR
-                ... Round to the start of the current hour
-            NOW-1DAY
-                ... Exactly 1 day prior to now
-            NOW/DAY+6MONTHS+3DAYS
-                ... 6 months and 3 days in the future from the start of
-                    the current day
-                    
-        Consult the DateField javadocs for more information.
-
-        Note: For faster range queries, consider the tdate type
-    -->
-    <fieldType name="date" class="solr.TrieDateField" precisionStep="0" positionIncrementGap="0"/>
-
-    <!-- A Trie based date field for faster date range queries and date faceting. -->
-    <fieldType name="tdate" class="solr.TrieDateField" precisionStep="6" positionIncrementGap="0"/>
-
     <!-- This is an example of using the KeywordTokenizer along
         With various TokenFilterFactories to produce a sortable field
         that does not include some properties of the source text
@@ -815,6 +776,11 @@
     <fieldType name="pfloats" class="solr.FloatPointField" docValues="true" multiValued="true"/>
     <fieldType name="plongs" class="solr.LongPointField" docValues="true" multiValued="true"/>
     <fieldType name="pdoubles" class="solr.DoublePointField" docValues="true" multiValued="true"/>
+    <fieldType name="random" class="solr.RandomSortField" indexed="true"/>
+    
+    <!-- since fields of this type are by default not stored or indexed,
+       any data added to them will be ignored outright.  -->
+    <fieldType name="ignored" stored="false" indexed="false" multiValued="true" class="solr.StrField" />
 
     <!-- The format for this date field is of the form 1995-12-31T23:59:59Z, and
          is a more restricted form of the canonical representation of dateTime
@@ -841,7 +807,14 @@
     
     <!--Binary data type. The data should be sent/retrieved in as Base64 encoded Strings -->
     <fieldType name="binary" class="solr.BinaryField"/>
-
+    
+    <!--
+    RankFields can be used to store scoring factors to improve document ranking. They should be used
+    in combination with RankQParserPlugin.
+    (experimental)
+    -->
+    <fieldType name="rank" class="solr.RankField"/>
+    
     <!-- solr.TextField allows the specification of custom text analyzers
          specified as a tokenizer and a list of token filters. Different
          analyzers may be specified for indexing and querying.
@@ -851,7 +824,7 @@
          matching across fields.
 
          For more info on customizing your analyzer chain, please see
-         http://lucene.apache.org/solr/guide/understanding-analyzers-tokenizers-and-filters.html#understanding-analyzers-tokenizers-and-filters
+         https://solr.apache.org/guide/solr/latest/indexing-guide/document-analysis.html#using-analyzers-tokenizers-and-filters
      -->
 
     <!-- One can also specify an existing Analyzer class that has a
@@ -866,7 +839,7 @@
     <dynamicField name="*_ws" type="text_ws"  indexed="true"  stored="true"/>
     <fieldType name="text_ws" class="solr.TextField" positionIncrementGap="100">
       <analyzer>
-        <tokenizer class="solr.WhitespaceTokenizerFactory"/>
+        <tokenizer name="whitespace"/>
       </analyzer>
     </fieldType>
 
@@ -893,6 +866,30 @@
         <filter class="solr.LowerCaseFilterFactory"/>
       </analyzer>
     </fieldType>
+    
+    <!-- SortableTextField generaly functions exactly like TextField,
+        except that it supports, and by default uses, docValues for sorting (or faceting)
+        on the first 1024 characters of the original field values (which is configurable).
+
+        This makes it a bit more useful then TextField in many situations, but the trade-off
+        is that it takes up more space on disk; which is why it's not used in place of TextField
+        for every fieldType in this _default schema.
+    -->
+    <dynamicField name="*_t_sort" type="text_gen_sort" indexed="true" stored="true" multiValued="false"/>
+    <dynamicField name="*_txt_sort" type="text_gen_sort" indexed="true" stored="true"/>
+    <fieldType name="text_gen_sort" class="solr.SortableTextField" positionIncrementGap="100" multiValued="true">
+      <analyzer type="index">
+        <tokenizer name="standard"/>
+        <filter name="stop" ignoreCase="true" words="stopwords.txt" />
+        <filter name="lowercase"/>
+      </analyzer>
+      <analyzer type="query">
+        <tokenizer name="standard"/>
+        <filter name="stop" ignoreCase="true" words="stopwords.txt" />
+        <filter name="synonymGraph" synonyms="synonyms.txt" ignoreCase="true" expand="true"/>
+        <filter name="lowercase"/>
+      </analyzer>
+    </fieldType>
 
     <!-- A text field with defaults appropriate for English: it tokenizes with StandardTokenizer,
          removes English stop words (lang/stopwords_en.txt), down cases, protects words from protwords.txt, and
diff --git a/conf/solr/8.11.1/solrconfig.xml b/conf/solr/9.3.0/solrconfig.xml
similarity index 67%
rename from conf/solr/8.11.1/solrconfig.xml
rename to conf/solr/9.3.0/solrconfig.xml
index 3e4e5adc7b6..b89315cdaa9 100644
--- a/conf/solr/8.11.1/solrconfig.xml
+++ b/conf/solr/9.3.0/solrconfig.xml
@@ -1,1410 +1,1176 @@
-<?xml version="1.0" encoding="UTF-8" ?>
-<!--
- Licensed to the Apache Software Foundation (ASF) under one or more
- contributor license agreements.  See the NOTICE file distributed with
- this work for additional information regarding copyright ownership.
- The ASF licenses this file to You under the Apache License, Version 2.0
- (the "License"); you may not use this file except in compliance with
- the License.  You may obtain a copy of the License at
-
-     http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
--->
-
-<!--
-     For more details about configurations options that may appear in
-     this file, see http://wiki.apache.org/solr/SolrConfigXml.
--->
-<config>
-  <!-- In all configuration below, a prefix of "solr." for class names
-       is an alias that causes solr to search appropriate packages,
-       including org.apache.solr.(search|update|request|core|analysis)
-
-       You may also specify a fully qualified Java classname if you
-       have your own custom plugins.
-    -->
-
-  <!-- Controls what version of Lucene various components of Solr
-       adhere to.  Generally, you want to use the latest version to
-       get all bug fixes and improvements. It is highly recommended
-       that you fully re-index after changing this setting as it can
-       affect both how text is indexed and queried.
-  -->
-  <luceneMatchVersion>7.3.0</luceneMatchVersion>
-
-  <!-- <lib/> directives can be used to instruct Solr to load any Jars
-       identified and use them to resolve any "plugins" specified in
-       your solrconfig.xml or schema.xml (ie: Analyzers, Request
-       Handlers, etc...).
-
-       All directories and paths are resolved relative to the
-       instanceDir.
-
-       Please note that <lib/> directives are processed in the order
-       that they appear in your solrconfig.xml file, and are "stacked"
-       on top of each other when building a ClassLoader - so if you have
-       plugin jars with dependencies on other jars, the "lower level"
-       dependency jars should be loaded first.
-
-       If a "./lib" directory exists in your instanceDir, all files
-       found in it are included as if you had used the following
-       syntax...
-
-              <lib dir="./lib" />
-    -->
-
-  <!-- A 'dir' option by itself adds any files found in the directory
-       to the classpath, this is useful for including all jars in a
-       directory.
-
-       When a 'regex' is specified in addition to a 'dir', only the
-       files in that directory which completely match the regex
-       (anchored on both ends) will be included.
-
-       If a 'dir' option (with or without a regex) is used and nothing
-       is found that matches, a warning will be logged.
-
-       The examples below can be used to load some solr-contribs along
-       with their external dependencies.
-    -->
-  <lib dir="${solr.install.dir:../../../..}/contrib/extraction/lib" regex=".*\.jar" />
-  <lib dir="${solr.install.dir:../../../..}/dist/" regex="solr-cell-\d.*\.jar" />
-
-  <lib dir="${solr.install.dir:../../../..}/contrib/clustering/lib/" regex=".*\.jar" />
-  <lib dir="${solr.install.dir:../../../..}/dist/" regex="solr-clustering-\d.*\.jar" />
-
-  <lib dir="${solr.install.dir:../../../..}/contrib/langid/lib/" regex=".*\.jar" />
-  <lib dir="${solr.install.dir:../../../..}/dist/" regex="solr-langid-\d.*\.jar" />
-
-  <lib dir="${solr.install.dir:../../../..}/contrib/velocity/lib" regex=".*\.jar" />
-  <lib dir="${solr.install.dir:../../../..}/dist/" regex="solr-velocity-\d.*\.jar" />
-  <!-- an exact 'path' can be used instead of a 'dir' to specify a
-       specific jar file.  This will cause a serious error to be logged
-       if it can't be loaded.
-    -->
-  <!--
-     <lib path="../a-jar-that-does-not-exist.jar" />
-  -->
-
-  <!-- Data Directory
-
-       Used to specify an alternate directory to hold all index data
-       other than the default ./data under the Solr home.  If
-       replication is in use, this should match the replication
-       configuration.
-    -->
-  <dataDir>${solr.data.dir:}</dataDir>
-
-
-  <!-- The DirectoryFactory to use for indexes.
-
-       solr.StandardDirectoryFactory is filesystem
-       based and tries to pick the best implementation for the current
-       JVM and platform.  solr.NRTCachingDirectoryFactory, the default,
-       wraps solr.StandardDirectoryFactory and caches small files in memory
-       for better NRT performance.
-
-       One can force a particular implementation via solr.MMapDirectoryFactory,
-       solr.NIOFSDirectoryFactory, or solr.SimpleFSDirectoryFactory.
-
-       solr.RAMDirectoryFactory is memory based and not persistent.
-    -->
-  <directoryFactory name="DirectoryFactory"
-                    class="${solr.directoryFactory:solr.NRTCachingDirectoryFactory}"/>
-
-  <!-- The CodecFactory for defining the format of the inverted index.
-       The default implementation is SchemaCodecFactory, which is the official Lucene
-       index format, but hooks into the schema to provide per-field customization of
-       the postings lists and per-document values in the fieldType element
-       (postingsFormat/docValuesFormat). Note that most of the alternative implementations
-       are experimental, so if you choose to customize the index format, it's a good
-       idea to convert back to the official format e.g. via IndexWriter.addIndexes(IndexReader)
-       before upgrading to a newer version to avoid unnecessary reindexing.
-       A "compressionMode" string element can be added to <codecFactory> to choose
-       between the existing compression modes in the default codec: "BEST_SPEED" (default)
-       or "BEST_COMPRESSION".
-  -->
-  <codecFactory class="solr.SchemaCodecFactory"/>
-
-  <!-- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-       Index Config - These settings control low-level behavior of indexing
-       Most example settings here show the default value, but are commented
-       out, to more easily see where customizations have been made.
-
-       Note: This replaces <indexDefaults> and <mainIndex> from older versions
-       ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -->
-  <indexConfig>
-    <!-- maxFieldLength was removed in 4.0. To get similar behavior, include a
-         LimitTokenCountFilterFactory in your fieldType definition. E.g.
-     <filter class="solr.LimitTokenCountFilterFactory" maxTokenCount="10000"/>
-    -->
-    <!-- Maximum time to wait for a write lock (ms) for an IndexWriter. Default: 1000 -->
-    <!-- <writeLockTimeout>1000</writeLockTimeout>  -->
-
-    <!-- Expert: Enabling compound file will use less files for the index,
-         using fewer file descriptors on the expense of performance decrease.
-         Default in Lucene is "true". Default in Solr is "false" (since 3.6) -->
-    <!-- <useCompoundFile>false</useCompoundFile> -->
-
-    <!-- ramBufferSizeMB sets the amount of RAM that may be used by Lucene
-         indexing for buffering added documents and deletions before they are
-         flushed to the Directory.
-         maxBufferedDocs sets a limit on the number of documents buffered
-         before flushing.
-         If both ramBufferSizeMB and maxBufferedDocs is set, then
-         Lucene will flush based on whichever limit is hit first.  -->
-    <!-- <ramBufferSizeMB>100</ramBufferSizeMB> -->
-    <!-- <maxBufferedDocs>1000</maxBufferedDocs> -->
-
-    <!-- Expert: Merge Policy
-         The Merge Policy in Lucene controls how merging of segments is done.
-         The default since Solr/Lucene 3.3 is TieredMergePolicy.
-         The default since Lucene 2.3 was the LogByteSizeMergePolicy,
-         Even older versions of Lucene used LogDocMergePolicy.
-      -->
-    <!--
-        <mergePolicyFactory class="org.apache.solr.index.TieredMergePolicyFactory">
-          <int name="maxMergeAtOnce">10</int>
-          <int name="segmentsPerTier">10</int>
-          <double name="noCFSRatio">0.1</double>
-        </mergePolicyFactory>
-      -->
-
-    <!-- Expert: Merge Scheduler
-         The Merge Scheduler in Lucene controls how merges are
-         performed.  The ConcurrentMergeScheduler (Lucene 2.3 default)
-         can perform merges in the background using separate threads.
-         The SerialMergeScheduler (Lucene 2.2 default) does not.
-     -->
-    <!--
-       <mergeScheduler class="org.apache.lucene.index.ConcurrentMergeScheduler"/>
-       -->
-
-    <!-- LockFactory
-
-         This option specifies which Lucene LockFactory implementation
-         to use.
-
-         single = SingleInstanceLockFactory - suggested for a
-                  read-only index or when there is no possibility of
-                  another process trying to modify the index.
-         native = NativeFSLockFactory - uses OS native file locking.
-                  Do not use when multiple solr webapps in the same
-                  JVM are attempting to share a single index.
-         simple = SimpleFSLockFactory  - uses a plain file for locking
-
-         Defaults: 'native' is default for Solr3.6 and later, otherwise
-                   'simple' is the default
-
-         More details on the nuances of each LockFactory...
-         http://wiki.apache.org/lucene-java/AvailableLockFactories
-    -->
-    <lockType>${solr.lock.type:native}</lockType>
-
-    <!-- Commit Deletion Policy
-         Custom deletion policies can be specified here. The class must
-         implement org.apache.lucene.index.IndexDeletionPolicy.
-
-         The default Solr IndexDeletionPolicy implementation supports
-         deleting index commit points on number of commits, age of
-         commit point and optimized status.
-
-         The latest commit point should always be preserved regardless
-         of the criteria.
-    -->
-    <!--
-    <deletionPolicy class="solr.SolrDeletionPolicy">
-    -->
-    <!-- The number of commit points to be kept -->
-    <!-- <str name="maxCommitsToKeep">1</str> -->
-    <!-- The number of optimized commit points to be kept -->
-    <!-- <str name="maxOptimizedCommitsToKeep">0</str> -->
-    <!--
-        Delete all commit points once they have reached the given age.
-        Supports DateMathParser syntax e.g.
-      -->
-    <!--
-       <str name="maxCommitAge">30MINUTES</str>
-       <str name="maxCommitAge">1DAY</str>
-    -->
-    <!--
-    </deletionPolicy>
-    -->
-
-    <!-- Lucene Infostream
-
-         To aid in advanced debugging, Lucene provides an "InfoStream"
-         of detailed information when indexing.
-
-         Setting The value to true will instruct the underlying Lucene
-         IndexWriter to write its debugging info the specified file
-      -->
-    <!-- <infoStream file="INFOSTREAM.txt">false</infoStream> -->
-  </indexConfig>
-
-
-  <!-- JMX
-
-       This example enables JMX if and only if an existing MBeanServer
-       is found, use this if you want to configure JMX through JVM
-       parameters. Remove this to disable exposing Solr configuration
-       and statistics to JMX.
-
-       For more details see http://wiki.apache.org/solr/SolrJmx
-    -->
-  <jmx />
-  <!-- If you want to connect to a particular server, specify the
-       agentId
-    -->
-  <!-- <jmx agentId="myAgent" /> -->
-  <!-- If you want to start a new MBeanServer, specify the serviceUrl -->
-  <!-- <jmx serviceUrl="service:jmx:rmi:///jndi/rmi://localhost:9999/solr"/>
-    -->
-
-  <!-- The default high-performance update handler -->
-  <updateHandler class="solr.DirectUpdateHandler2">
-
-    <!-- Enables a transaction log, used for real-time get, durability, and
-         and solr cloud replica recovery.  The log can grow as big as
-         uncommitted changes to the index, so use of a hard autoCommit
-         is recommended (see below).
-         "dir" - the target directory for transaction logs, defaults to the
-                solr data directory.
-         "numVersionBuckets" - sets the number of buckets used to keep
-                track of max version values when checking for re-ordered
-                updates; increase this value to reduce the cost of
-                synchronizing access to version buckets during high-volume
-                indexing, this requires 8 bytes (long) * numVersionBuckets
-                of heap space per Solr core.
-    -->
-    <updateLog>
-      <str name="dir">${solr.ulog.dir:}</str>
-      <int name="numVersionBuckets">${solr.ulog.numVersionBuckets:65536}</int>
-    </updateLog>
-
-    <!-- AutoCommit
-
-         Perform a hard commit automatically under certain conditions.
-         Instead of enabling autoCommit, consider using "commitWithin"
-         when adding documents.
-
-         http://wiki.apache.org/solr/UpdateXmlMessages
-
-         maxDocs - Maximum number of documents to add since the last
-                   commit before automatically triggering a new commit.
-
-         maxTime - Maximum amount of time in ms that is allowed to pass
-                   since a document was added before automatically
-                   triggering a new commit.
-         openSearcher - if false, the commit causes recent index changes
-           to be flushed to stable storage, but does not cause a new
-           searcher to be opened to make those changes visible.
-
-         If the updateLog is enabled, then it's highly recommended to
-         have some sort of hard autoCommit to limit the log size.
-      -->
-    <autoCommit>
-      <maxTime>${solr.autoCommit.maxTime:15000}</maxTime>
-      <openSearcher>false</openSearcher>
-    </autoCommit>
-
-    <!-- softAutoCommit is like autoCommit except it causes a
-         'soft' commit which only ensures that changes are visible
-         but does not ensure that data is synced to disk.  This is
-         faster and more near-realtime friendly than a hard commit.
-      -->
-
-    <autoSoftCommit>
-      <maxTime>${solr.autoSoftCommit.maxTime:-1}</maxTime>
-    </autoSoftCommit>
-
-    <!-- Update Related Event Listeners
-
-         Various IndexWriter related events can trigger Listeners to
-         take actions.
-
-         postCommit - fired after every commit or optimize command
-         postOptimize - fired after every optimize command
-      -->
-
-  </updateHandler>
-
-  <!-- IndexReaderFactory
-
-       Use the following format to specify a custom IndexReaderFactory,
-       which allows for alternate IndexReader implementations.
-
-       ** Experimental Feature **
-
-       Please note - Using a custom IndexReaderFactory may prevent
-       certain other features from working. The API to
-       IndexReaderFactory may change without warning or may even be
-       removed from future releases if the problems cannot be
-       resolved.
-
-
-       ** Features that may not work with custom IndexReaderFactory **
-
-       The ReplicationHandler assumes a disk-resident index. Using a
-       custom IndexReader implementation may cause incompatibility
-       with ReplicationHandler and may cause replication to not work
-       correctly. See SOLR-1366 for details.
-
-    -->
-  <!--
-  <indexReaderFactory name="IndexReaderFactory" class="package.class">
-    <str name="someArg">Some Value</str>
-  </indexReaderFactory >
-  -->
-
-  <!-- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-       Query section - these settings control query time things like caches
-       ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -->
-  <query>
-
-    <!-- Maximum number of clauses in each BooleanQuery,  an exception
-         is thrown if exceeded.  It is safe to increase or remove this setting,
-         since it is purely an arbitrary limit to try and catch user errors where
-         large boolean queries may not be the best implementation choice.
-      -->
-    <maxBooleanClauses>1024</maxBooleanClauses>
-
-    <!-- Solr Internal Query Caches
-
-         There are two implementations of cache available for Solr,
-         LRUCache, based on a synchronized LinkedHashMap, and
-         FastLRUCache, based on a ConcurrentHashMap.
-
-         FastLRUCache has faster gets and slower puts in single
-         threaded operation and thus is generally faster than LRUCache
-         when the hit ratio of the cache is high (> 75%), and may be
-         faster under other scenarios on multi-cpu systems.
-    -->
-
-    <!-- Filter Cache
-
-         Cache used by SolrIndexSearcher for filters (DocSets),
-         unordered sets of *all* documents that match a query.  When a
-         new searcher is opened, its caches may be prepopulated or
-         "autowarmed" using data from caches in the old searcher.
-         autowarmCount is the number of items to prepopulate.  For
-         LRUCache, the autowarmed items will be the most recently
-         accessed items.
-
-         Parameters:
-           class - the SolrCache implementation LRUCache or
-               (LRUCache or FastLRUCache)
-           size - the maximum number of entries in the cache
-           initialSize - the initial capacity (number of entries) of
-               the cache.  (see java.util.HashMap)
-           autowarmCount - the number of entries to prepopulate from
-               and old cache.
-           maxRamMB - the maximum amount of RAM (in MB) that this cache is allowed
-                      to occupy. Note that when this option is specified, the size
-                      and initialSize parameters are ignored.
-      -->
-    <filterCache class="solr.search.CaffeineCache"
-                 size="512"
-                 initialSize="512"
-                 autowarmCount="0"/>
-
-    <!-- Query Result Cache
-
-         Caches results of searches - ordered lists of document ids
-         (DocList) based on a query, a sort, and the range of documents requested.
-         Additional supported parameter by LRUCache:
-            maxRamMB - the maximum amount of RAM (in MB) that this cache is allowed
-                       to occupy
-      -->
-    <queryResultCache class="solr.search.CaffeineCache"
-                      size="512"
-                      initialSize="512"
-                      autowarmCount="0"/>
-
-    <!-- Document Cache
-
-         Caches Lucene Document objects (the stored fields for each
-         document).  Since Lucene internal document ids are transient,
-         this cache will not be autowarmed.
-      -->
-    <documentCache class="solr.search.CaffeineCache"
-                   size="512"
-                   initialSize="512"
-                   autowarmCount="0"/>
-
-    <!-- custom cache currently used by block join -->
-    <cache name="perSegFilter"
-           class="solr.search.CaffeineCache"
-           size="10"
-           initialSize="0"
-           autowarmCount="10"
-           regenerator="solr.NoOpRegenerator" />
-
-    <!-- Field Value Cache
-
-         Cache used to hold field values that are quickly accessible
-         by document id.  The fieldValueCache is created by default
-         even if not configured here.
-      -->
-    <!--
-       <fieldValueCache class="solr.FastLRUCache"
-                        size="512"
-                        autowarmCount="128"
-                        showItems="32" />
-      -->
-
-    <!-- Custom Cache
-
-         Example of a generic cache.  These caches may be accessed by
-         name through SolrIndexSearcher.getCache(),cacheLookup(), and
-         cacheInsert().  The purpose is to enable easy caching of
-         user/application level data.  The regenerator argument should
-         be specified as an implementation of solr.CacheRegenerator
-         if autowarming is desired.
-      -->
-    <!--
-       <cache name="myUserCache"
-              class="solr.LRUCache"
-              size="4096"
-              initialSize="1024"
-              autowarmCount="1024"
-              regenerator="com.mycompany.MyRegenerator"
-              />
-      -->
-
-
-    <!-- Lazy Field Loading
-
-         If true, stored fields that are not requested will be loaded
-         lazily.  This can result in a significant speed improvement
-         if the usual case is to not load all stored fields,
-         especially if the skipped fields are large compressed text
-         fields.
-    -->
-    <enableLazyFieldLoading>true</enableLazyFieldLoading>
-
-    <!-- Use Filter For Sorted Query
-
-         A possible optimization that attempts to use a filter to
-         satisfy a search.  If the requested sort does not include
-         score, then the filterCache will be checked for a filter
-         matching the query. If found, the filter will be used as the
-         source of document ids, and then the sort will be applied to
-         that.
-
-         For most situations, this will not be useful unless you
-         frequently get the same search repeatedly with different sort
-         options, and none of them ever use "score"
-      -->
-    <!--
-       <useFilterForSortedQuery>true</useFilterForSortedQuery>
-      -->
-
-    <!-- Result Window Size
-
-         An optimization for use with the queryResultCache.  When a search
-         is requested, a superset of the requested number of document ids
-         are collected.  For example, if a search for a particular query
-         requests matching documents 10 through 19, and queryWindowSize is 50,
-         then documents 0 through 49 will be collected and cached.  Any further
-         requests in that range can be satisfied via the cache.
-      -->
-    <queryResultWindowSize>20</queryResultWindowSize>
-
-    <!-- Maximum number of documents to cache for any entry in the
-         queryResultCache.
-      -->
-    <queryResultMaxDocsCached>200</queryResultMaxDocsCached>
-
-    <!-- Query Related Event Listeners
-
-         Various IndexSearcher related events can trigger Listeners to
-         take actions.
-
-         newSearcher - fired whenever a new searcher is being prepared
-         and there is a current searcher handling requests (aka
-         registered).  It can be used to prime certain caches to
-         prevent long request times for certain requests.
-
-         firstSearcher - fired whenever a new searcher is being
-         prepared but there is no current registered searcher to handle
-         requests or to gain autowarming data from.
-
-
-      -->
-    <!-- QuerySenderListener takes an array of NamedList and executes a
-         local query request for each NamedList in sequence.
-      -->
-    <listener event="newSearcher" class="solr.QuerySenderListener">
-      <arr name="queries">
-        <!--
-           <lst><str name="q">solr</str><str name="sort">price asc</str></lst>
-           <lst><str name="q">rocks</str><str name="sort">weight asc</str></lst>
-          -->
-      </arr>
-    </listener>
-    <listener event="firstSearcher" class="solr.QuerySenderListener">
-      <arr name="queries">
-        <!--
-        <lst>
-          <str name="q">static firstSearcher warming in solrconfig.xml</str>
-        </lst>
-        -->
-      </arr>
-    </listener>
-
-    <!-- Use Cold Searcher
-
-         If a search request comes in and there is no current
-         registered searcher, then immediately register the still
-         warming searcher and use it.  If "false" then all requests
-         will block until the first searcher is done warming.
-      -->
-    <useColdSearcher>false</useColdSearcher>
-
-  </query>
-
-
-  <!-- Request Dispatcher
-
-       This section contains instructions for how the SolrDispatchFilter
-       should behave when processing requests for this SolrCore.
-
-    -->
-  <requestDispatcher>
-    <!-- Request Parsing
-
-         These settings indicate how Solr Requests may be parsed, and
-         what restrictions may be placed on the ContentStreams from
-         those requests
-
-         enableRemoteStreaming - enables use of the stream.file
-         and stream.url parameters for specifying remote streams.
-
-         multipartUploadLimitInKB - specifies the max size (in KiB) of
-         Multipart File Uploads that Solr will allow in a Request.
-
-         formdataUploadLimitInKB - specifies the max size (in KiB) of
-         form data (application/x-www-form-urlencoded) sent via
-         POST. You can use POST to pass request parameters not
-         fitting into the URL.
-
-         addHttpRequestToContext - if set to true, it will instruct
-         the requestParsers to include the original HttpServletRequest
-         object in the context map of the SolrQueryRequest under the
-         key "httpRequest". It will not be used by any of the existing
-         Solr components, but may be useful when developing custom
-         plugins.
-
-         *** WARNING ***
-         Before enabling remote streaming, you should make sure your
-         system has authentication enabled.
-
-    <requestParsers enableRemoteStreaming="false"
-                    multipartUploadLimitInKB="-1"
-                    formdataUploadLimitInKB="-1"
-                    addHttpRequestToContext="false"/>
-      -->
-
-    <!-- HTTP Caching
-
-         Set HTTP caching related parameters (for proxy caches and clients).
-
-         The options below instruct Solr not to output any HTTP Caching
-         related headers
-      -->
-    <httpCaching never304="true" />
-    <!-- If you include a <cacheControl> directive, it will be used to
-         generate a Cache-Control header (as well as an Expires header
-         if the value contains "max-age=")
-
-         By default, no Cache-Control header is generated.
-
-         You can use the <cacheControl> option even if you have set
-         never304="true"
-      -->
-    <!--
-       <httpCaching never304="true" >
-         <cacheControl>max-age=30, public</cacheControl>
-       </httpCaching>
-      -->
-    <!-- To enable Solr to respond with automatically generated HTTP
-         Caching headers, and to response to Cache Validation requests
-         correctly, set the value of never304="false"
-
-         This will cause Solr to generate Last-Modified and ETag
-         headers based on the properties of the Index.
-
-         The following options can also be specified to affect the
-         values of these headers...
-
-         lastModFrom - the default value is "openTime" which means the
-         Last-Modified value (and validation against If-Modified-Since
-         requests) will all be relative to when the current Searcher
-         was opened.  You can change it to lastModFrom="dirLastMod" if
-         you want the value to exactly correspond to when the physical
-         index was last modified.
-
-         etagSeed="..." is an option you can change to force the ETag
-         header (and validation against If-None-Match requests) to be
-         different even if the index has not changed (ie: when making
-         significant changes to your config file)
-
-         (lastModifiedFrom and etagSeed are both ignored if you use
-         the never304="true" option)
-      -->
-    <!--
-       <httpCaching lastModifiedFrom="openTime"
-                    etagSeed="Solr">
-         <cacheControl>max-age=30, public</cacheControl>
-       </httpCaching>
-      -->
-  </requestDispatcher>
-
-  <!-- Request Handlers
-
-       http://wiki.apache.org/solr/SolrRequestHandler
-
-       Incoming queries will be dispatched to a specific handler by name
-       based on the path specified in the request.
-
-       If a Request Handler is declared with startup="lazy", then it will
-       not be initialized until the first request that uses it.
-
-    -->
-  <!-- SearchHandler
-
-       http://wiki.apache.org/solr/SearchHandler
-
-       For processing Search Queries, the primary Request Handler
-       provided with Solr is "SearchHandler" It delegates to a sequent
-       of SearchComponents (see below) and supports distributed
-       queries across multiple shards
-    -->
-  <requestHandler name="/select" class="solr.SearchHandler">
-    <!-- default values for query parameters can be specified, these
-         will be overridden by parameters in the request
-      -->
-    <lst name="defaults">
-      <str name="echoParams">explicit</str>
-      <int name="rows">10</int>
-      <str name="defType">edismax</str>
-      <float name="tie">0.075</float>
-        <str name="qf">
-            dvName^400
-            authorName^180
-            dvSubject^190
-            dvDescription^180
-            dvAffiliation^170
-            title^130
-            subject^120
-            keyword^110
-            topicClassValue^100
-            dsDescriptionValue^90
-            authorAffiliation^80
-            publicationCitation^60
-            producerName^50
-            fileName^30
-            fileDescription^30
-            variableLabel^20
-            variableName^10
-            _text_^1.0
-        </str>
-        <str name="pf">
-            dvName^200
-            authorName^100
-            dvSubject^100
-            dvDescription^100
-            dvAffiliation^100
-            title^75
-            subject^75
-            keyword^75
-            topicClassValue^75
-            dsDescriptionValue^75
-            authorAffiliation^75
-            publicationCitation^75
-            producerName^75
-        </str>
-        <!-- Even though this number is huge it only seems to apply a boost of ~1.5x to final result -MAD 4.9.3--> 
-        <str name="bq">
-            isHarvested:false^25000
-        </str>
-
-      <!-- Default search field
-         <str name="df">text</str> 
-        -->
-      <!-- Change from JSON to XML format (the default prior to Solr 7.0)
-         <str name="wt">xml</str> 
-        -->
-    </lst>
-    <!-- In addition to defaults, "appends" params can be specified
-         to identify values which should be appended to the list of
-         multi-val params from the query (or the existing "defaults").
-      -->
-    <!-- In this example, the param "fq=instock:true" would be appended to
-         any query time fq params the user may specify, as a mechanism for
-         partitioning the index, independent of any user selected filtering
-         that may also be desired (perhaps as a result of faceted searching).
-
-         NOTE: there is *absolutely* nothing a client can do to prevent these
-         "appends" values from being used, so don't use this mechanism
-         unless you are sure you always want it.
-      -->
-    <!--
-       <lst name="appends">
-         <str name="fq">inStock:true</str>
-       </lst>
-      -->
-    <!-- "invariants" are a way of letting the Solr maintainer lock down
-         the options available to Solr clients.  Any params values
-         specified here are used regardless of what values may be specified
-         in either the query, the "defaults", or the "appends" params.
-
-         In this example, the facet.field and facet.query params would
-         be fixed, limiting the facets clients can use.  Faceting is
-         not turned on by default - but if the client does specify
-         facet=true in the request, these are the only facets they
-         will be able to see counts for; regardless of what other
-         facet.field or facet.query params they may specify.
-
-         NOTE: there is *absolutely* nothing a client can do to prevent these
-         "invariants" values from being used, so don't use this mechanism
-         unless you are sure you always want it.
-      -->
-    <!--
-       <lst name="invariants">
-         <str name="facet.field">cat</str>
-         <str name="facet.field">manu_exact</str>
-         <str name="facet.query">price:[* TO 500]</str>
-         <str name="facet.query">price:[500 TO *]</str>
-       </lst>
-      -->
-    <!-- If the default list of SearchComponents is not desired, that
-         list can either be overridden completely, or components can be
-         prepended or appended to the default list.  (see below)
-      -->
-    <!--
-       <arr name="components">
-         <str>nameOfCustomComponent1</str>
-         <str>nameOfCustomComponent2</str>
-       </arr>
-      -->
-  </requestHandler>
-
-  <!-- A request handler that returns indented JSON by default -->
-  <requestHandler name="/query" class="solr.SearchHandler">
-    <lst name="defaults">
-      <str name="echoParams">explicit</str>
-      <str name="wt">json</str>
-      <str name="indent">true</str>
-    </lst>
-  </requestHandler>
-
-
-  <!-- A Robust Example
-
-       This example SearchHandler declaration shows off usage of the
-       SearchHandler with many defaults declared
-
-       Note that multiple instances of the same Request Handler
-       (SearchHandler) can be registered multiple times with different
-       names (and different init parameters)
-    -->
-  <requestHandler name="/browse" class="solr.SearchHandler" useParams="query,facets,velocity,browse">
-    <lst name="defaults">
-      <str name="echoParams">explicit</str>
-    </lst>
-  </requestHandler>
-
-  <initParams path="/update/**,/query,/select,/tvrh,/elevate,/spell,/browse">
-    <lst name="defaults">
-      <str name="df">_text_</str>
-    </lst>
-  </initParams>
-
-  <!-- Solr Cell Update Request Handler
-
-       http://wiki.apache.org/solr/ExtractingRequestHandler
-
-    -->
-  <requestHandler name="/update/extract"
-                  startup="lazy"
-                  class="solr.extraction.ExtractingRequestHandler" >
-    <lst name="defaults">
-      <str name="lowernames">true</str>
-      <str name="fmap.meta">ignored_</str>
-      <str name="fmap.content">_text_</str>
-    </lst>
-  </requestHandler>
-
-  <!-- Search Components
-
-       Search components are registered to SolrCore and used by
-       instances of SearchHandler (which can access them by name)
-
-       By default, the following components are available:
-
-       <searchComponent name="query"     class="solr.QueryComponent" />
-       <searchComponent name="facet"     class="solr.FacetComponent" />
-       <searchComponent name="mlt"       class="solr.MoreLikeThisComponent" />
-       <searchComponent name="highlight" class="solr.HighlightComponent" />
-       <searchComponent name="stats"     class="solr.StatsComponent" />
-       <searchComponent name="debug"     class="solr.DebugComponent" />
-
-       Default configuration in a requestHandler would look like:
-
-       <arr name="components">
-         <str>query</str>
-         <str>facet</str>
-         <str>mlt</str>
-         <str>highlight</str>
-         <str>stats</str>
-         <str>debug</str>
-       </arr>
-
-       If you register a searchComponent to one of the standard names,
-       that will be used instead of the default.
-
-       To insert components before or after the 'standard' components, use:
-
-       <arr name="first-components">
-         <str>myFirstComponentName</str>
-       </arr>
-
-       <arr name="last-components">
-         <str>myLastComponentName</str>
-       </arr>
-
-       NOTE: The component registered with the name "debug" will
-       always be executed after the "last-components"
-
-     -->
-
-  <!-- Spell Check
-
-       The spell check component can return a list of alternative spelling
-       suggestions.
-
-       http://wiki.apache.org/solr/SpellCheckComponent
-    -->
-  <searchComponent name="spellcheck" class="solr.SpellCheckComponent">
-
-    <str name="queryAnalyzerFieldType">text_general</str>
-
-    <!-- Multiple "Spell Checkers" can be declared and used by this
-         component
-      -->
-
-    <!-- a spellchecker built from a field of the main index -->
-    <lst name="spellchecker">
-      <str name="name">default</str>
-      <str name="field">_text_</str>
-      <str name="classname">solr.DirectSolrSpellChecker</str>
-      <!-- the spellcheck distance measure used, the default is the internal levenshtein -->
-      <str name="distanceMeasure">internal</str>
-      <!-- minimum accuracy needed to be considered a valid spellcheck suggestion -->
-      <float name="accuracy">0.5</float>
-      <!-- the maximum #edits we consider when enumerating terms: can be 1 or 2 -->
-      <int name="maxEdits">2</int>
-      <!-- the minimum shared prefix when enumerating terms -->
-      <int name="minPrefix">1</int>
-      <!-- maximum number of inspections per result. -->
-      <int name="maxInspections">5</int>
-      <!-- minimum length of a query term to be considered for correction -->
-      <int name="minQueryLength">4</int>
-      <!-- maximum threshold of documents a query term can appear to be considered for correction -->
-      <float name="maxQueryFrequency">0.01</float>
-      <!-- uncomment this to require suggestions to occur in 1% of the documents
-        <float name="thresholdTokenFrequency">.01</float>
-      -->
-    </lst>
-
-    <!-- a spellchecker that can break or combine words.  See "/spell" handler below for usage -->
-    <!--
-    <lst name="spellchecker">
-      <str name="name">wordbreak</str>
-      <str name="classname">solr.WordBreakSolrSpellChecker</str>
-      <str name="field">name</str>
-      <str name="combineWords">true</str>
-      <str name="breakWords">true</str>
-      <int name="maxChanges">10</int>
-    </lst>
-    -->
-  </searchComponent>
-
-  <!-- A request handler for demonstrating the spellcheck component.
-
-       NOTE: This is purely as an example.  The whole purpose of the
-       SpellCheckComponent is to hook it into the request handler that
-       handles your normal user queries so that a separate request is
-       not needed to get suggestions.
-
-       IN OTHER WORDS, THERE IS REALLY GOOD CHANCE THE SETUP BELOW IS
-       NOT WHAT YOU WANT FOR YOUR PRODUCTION SYSTEM!
-
-       See http://wiki.apache.org/solr/SpellCheckComponent for details
-       on the request parameters.
-    -->
-  <requestHandler name="/spell" class="solr.SearchHandler" startup="lazy">
-    <lst name="defaults">
-      <!-- Solr will use suggestions from both the 'default' spellchecker
-           and from the 'wordbreak' spellchecker and combine them.
-           collations (re-written queries) can include a combination of
-           corrections from both spellcheckers -->
-      <str name="spellcheck.dictionary">default</str>
-      <str name="spellcheck">on</str>
-      <str name="spellcheck.extendedResults">true</str>
-      <str name="spellcheck.count">10</str>
-      <str name="spellcheck.alternativeTermCount">5</str>
-      <str name="spellcheck.maxResultsForSuggest">5</str>
-      <str name="spellcheck.collate">true</str>
-      <str name="spellcheck.collateExtendedResults">true</str>
-      <str name="spellcheck.maxCollationTries">10</str>
-      <str name="spellcheck.maxCollations">5</str>
-    </lst>
-    <arr name="last-components">
-      <str>spellcheck</str>
-    </arr>
-  </requestHandler>
-
-  <!-- Term Vector Component
-
-       http://wiki.apache.org/solr/TermVectorComponent
-    -->
-  <searchComponent name="tvComponent" class="solr.TermVectorComponent"/>
-
-  <!-- A request handler for demonstrating the term vector component
-
-       This is purely as an example.
-
-       In reality you will likely want to add the component to your
-       already specified request handlers.
-    -->
-  <requestHandler name="/tvrh" class="solr.SearchHandler" startup="lazy">
-    <lst name="defaults">
-      <bool name="tv">true</bool>
-    </lst>
-    <arr name="last-components">
-      <str>tvComponent</str>
-    </arr>
-  </requestHandler>
-
-  <!-- Clustering Component. (Omitted here. See the default Solr example for a typical configuration.) -->
-
-  <!-- Terms Component
-
-       http://wiki.apache.org/solr/TermsComponent
-
-       A component to return terms and document frequency of those
-       terms
-    -->
-  <searchComponent name="terms" class="solr.TermsComponent"/>
-
-  <!-- A request handler for demonstrating the terms component -->
-  <requestHandler name="/terms" class="solr.SearchHandler" startup="lazy">
-    <lst name="defaults">
-      <bool name="terms">true</bool>
-      <bool name="distrib">false</bool>
-    </lst>
-    <arr name="components">
-      <str>terms</str>
-    </arr>
-  </requestHandler>
-
-
-  <!-- Query Elevation Component
-
-       http://wiki.apache.org/solr/QueryElevationComponent
-
-       a search component that enables you to configure the top
-       results for a given query regardless of the normal lucene
-       scoring.
-    -->
-  <searchComponent name="elevator" class="solr.QueryElevationComponent" >
-    <!-- pick a fieldType to analyze queries -->
-    <str name="queryFieldType">string</str>
-  </searchComponent>
-
-  <!-- A request handler for demonstrating the elevator component -->
-  <requestHandler name="/elevate" class="solr.SearchHandler" startup="lazy">
-    <lst name="defaults">
-      <str name="echoParams">explicit</str>
-    </lst>
-    <arr name="last-components">
-      <str>elevator</str>
-    </arr>
-  </requestHandler>
-
-  <!-- Highlighting Component
-
-       http://wiki.apache.org/solr/HighlightingParameters
-    -->
-  <searchComponent class="solr.HighlightComponent" name="highlight">
-    <highlighting>
-      <!-- Configure the standard fragmenter -->
-      <!-- This could most likely be commented out in the "default" case -->
-      <fragmenter name="gap"
-                  default="true"
-                  class="solr.highlight.GapFragmenter">
-        <lst name="defaults">
-          <int name="hl.fragsize">100</int>
-        </lst>
-      </fragmenter>
-
-      <!-- A regular-expression-based fragmenter
-           (for sentence extraction)
-        -->
-      <fragmenter name="regex"
-                  class="solr.highlight.RegexFragmenter">
-        <lst name="defaults">
-          <!-- slightly smaller fragsizes work better because of slop -->
-          <int name="hl.fragsize">70</int>
-          <!-- allow 50% slop on fragment sizes -->
-          <float name="hl.regex.slop">0.5</float>
-          <!-- a basic sentence pattern -->
-          <str name="hl.regex.pattern">[-\w ,/\n\&quot;&apos;]{20,200}</str>
-        </lst>
-      </fragmenter>
-
-      <!-- Configure the standard formatter -->
-      <formatter name="html"
-                 default="true"
-                 class="solr.highlight.HtmlFormatter">
-        <lst name="defaults">
-          <str name="hl.simple.pre"><![CDATA[<em>]]></str>
-          <str name="hl.simple.post"><![CDATA[</em>]]></str>
-        </lst>
-      </formatter>
-
-      <!-- Configure the standard encoder -->
-      <encoder name="html"
-               class="solr.highlight.HtmlEncoder" />
-
-      <!-- Configure the standard fragListBuilder -->
-      <fragListBuilder name="simple"
-                       class="solr.highlight.SimpleFragListBuilder"/>
-
-      <!-- Configure the single fragListBuilder -->
-      <fragListBuilder name="single"
-                       class="solr.highlight.SingleFragListBuilder"/>
-
-      <!-- Configure the weighted fragListBuilder -->
-      <fragListBuilder name="weighted"
-                       default="true"
-                       class="solr.highlight.WeightedFragListBuilder"/>
-
-      <!-- default tag FragmentsBuilder -->
-      <fragmentsBuilder name="default"
-                        default="true"
-                        class="solr.highlight.ScoreOrderFragmentsBuilder">
-        <!--
-        <lst name="defaults">
-          <str name="hl.multiValuedSeparatorChar">/</str>
-        </lst>
-        -->
-      </fragmentsBuilder>
-
-      <!-- multi-colored tag FragmentsBuilder -->
-      <fragmentsBuilder name="colored"
-                        class="solr.highlight.ScoreOrderFragmentsBuilder">
-        <lst name="defaults">
-          <str name="hl.tag.pre"><![CDATA[
-               <b style="background:yellow">,<b style="background:lawgreen">,
-               <b style="background:aquamarine">,<b style="background:magenta">,
-               <b style="background:palegreen">,<b style="background:coral">,
-               <b style="background:wheat">,<b style="background:khaki">,
-               <b style="background:lime">,<b style="background:deepskyblue">]]></str>
-          <str name="hl.tag.post"><![CDATA[</b>]]></str>
-        </lst>
-      </fragmentsBuilder>
-
-      <boundaryScanner name="default"
-                       default="true"
-                       class="solr.highlight.SimpleBoundaryScanner">
-        <lst name="defaults">
-          <str name="hl.bs.maxScan">10</str>
-          <str name="hl.bs.chars">.,!? &#9;&#10;&#13;</str>
-        </lst>
-      </boundaryScanner>
-
-      <boundaryScanner name="breakIterator"
-                       class="solr.highlight.BreakIteratorBoundaryScanner">
-        <lst name="defaults">
-          <!-- type should be one of CHARACTER, WORD(default), LINE and SENTENCE -->
-          <str name="hl.bs.type">WORD</str>
-          <!-- language and country are used when constructing Locale object.  -->
-          <!-- And the Locale object will be used when getting instance of BreakIterator -->
-          <str name="hl.bs.language">en</str>
-          <str name="hl.bs.country">US</str>
-        </lst>
-      </boundaryScanner>
-    </highlighting>
-  </searchComponent>
-
-  <!-- Update Processors
-
-       Chains of Update Processor Factories for dealing with Update
-       Requests can be declared, and then used by name in Update
-       Request Processors
-
-       http://wiki.apache.org/solr/UpdateRequestProcessor
-
-    -->
-
-  <!-- Add unknown fields to the schema
-
-       Field type guessing update processors that will
-       attempt to parse string-typed field values as Booleans, Longs,
-       Doubles, or Dates, and then add schema fields with the guessed
-       field types. Text content will be indexed as "text_general" as
-       well as a copy to a plain string version in *_str.
-
-       These require that the schema is both managed and mutable, by
-       declaring schemaFactory as ManagedIndexSchemaFactory, with
-       mutable specified as true.
-
-       See http://wiki.apache.org/solr/GuessingFieldTypes
-    -->
-<schemaFactory class="ClassicIndexSchemaFactory"/>
-
-  <updateProcessor class="solr.UUIDUpdateProcessorFactory" name="uuid"/>
-  <updateProcessor class="solr.RemoveBlankFieldUpdateProcessorFactory" name="remove-blank"/>
-  <updateProcessor class="solr.FieldNameMutatingUpdateProcessorFactory" name="field-name-mutating">
-    <str name="pattern">[^\w-\.]</str>
-    <str name="replacement">_</str>
-  </updateProcessor>
-  <updateProcessor class="solr.ParseBooleanFieldUpdateProcessorFactory" name="parse-boolean"/>
-  <updateProcessor class="solr.ParseLongFieldUpdateProcessorFactory" name="parse-long"/>
-  <updateProcessor class="solr.ParseDoubleFieldUpdateProcessorFactory" name="parse-double"/>
-  <updateProcessor class="solr.ParseDateFieldUpdateProcessorFactory" name="parse-date">
-    <arr name="format">
-      <str>yyyy-MM-dd'T'HH:mm:ss.SSSZ</str>
-      <str>yyyy-MM-dd'T'HH:mm:ss,SSSZ</str>
-      <str>yyyy-MM-dd'T'HH:mm:ss.SSS</str>
-      <str>yyyy-MM-dd'T'HH:mm:ss,SSS</str>
-      <str>yyyy-MM-dd'T'HH:mm:ssZ</str>
-      <str>yyyy-MM-dd'T'HH:mm:ss</str>
-      <str>yyyy-MM-dd'T'HH:mmZ</str>
-      <str>yyyy-MM-dd'T'HH:mm</str>
-      <str>yyyy-MM-dd HH:mm:ss.SSSZ</str>
-      <str>yyyy-MM-dd HH:mm:ss,SSSZ</str>
-      <str>yyyy-MM-dd HH:mm:ss.SSS</str>
-      <str>yyyy-MM-dd HH:mm:ss,SSS</str>
-      <str>yyyy-MM-dd HH:mm:ssZ</str>
-      <str>yyyy-MM-dd HH:mm:ss</str>
-      <str>yyyy-MM-dd HH:mmZ</str>
-      <str>yyyy-MM-dd HH:mm</str>
-      <str>yyyy-MM-dd</str>
-    </arr>
-  </updateProcessor>
-
-  <!--Dataverse removed-->
-<!--  <updateProcessor class="solr.AddSchemaFieldsUpdateProcessorFactory" name="add-schema-fields">
-    <lst name="typeMapping">
-      <str name="valueClass">java.lang.String</str>
-      <str name="fieldType">text_general</str>
-      <lst name="copyField">
-        <str name="dest">*_str</str>
-        <int name="maxChars">256</int>
-      </lst>
-
-      <bool name="default">true</bool>
-    </lst>
-    <lst name="typeMapping">
-      <str name="valueClass">java.lang.Boolean</str>
-      <str name="fieldType">booleans</str>
-    </lst>
-    <lst name="typeMapping">
-      <str name="valueClass">java.util.Date</str>
-      <str name="fieldType">pdates</str>
-    </lst>
-    <lst name="typeMapping">
-      <str name="valueClass">java.lang.Long</str>
-      <str name="valueClass">java.lang.Integer</str>
-      <str name="fieldType">plongs</str>
-    </lst>
-    <lst name="typeMapping">
-      <str name="valueClass">java.lang.Number</str>
-      <str name="fieldType">pdoubles</str>
-    </lst>
-    </updateProcessor> -->
-
-  <!-- The update.autoCreateFields property can be turned to false to disable schemaless mode -->
-  <updateRequestProcessorChain name="add-unknown-fields-to-the-schema" default="${update.autoCreateFields:false}"
-           processor="uuid,remove-blank,field-name-mutating,parse-boolean,parse-long,parse-double,parse-date">
-    <processor class="solr.LogUpdateProcessorFactory"/>
-    <processor class="solr.DistributedUpdateProcessorFactory"/>
-    <processor class="solr.RunUpdateProcessorFactory"/>
-  </updateRequestProcessorChain>
-
-  <!-- Deduplication
-
-       An example dedup update processor that creates the "id" field
-       on the fly based on the hash code of some other fields.  This
-       example has overwriteDupes set to false since we are using the
-       id field as the signatureField and Solr will maintain
-       uniqueness based on that anyway.
-
-    -->
-  <!--
-     <updateRequestProcessorChain name="dedupe">
-       <processor class="solr.processor.SignatureUpdateProcessorFactory">
-         <bool name="enabled">true</bool>
-         <str name="signatureField">id</str>
-         <bool name="overwriteDupes">false</bool>
-         <str name="fields">name,features,cat</str>
-         <str name="signatureClass">solr.processor.Lookup3Signature</str>
-       </processor>
-       <processor class="solr.LogUpdateProcessorFactory" />
-       <processor class="solr.RunUpdateProcessorFactory" />
-     </updateRequestProcessorChain>
-    -->
-
-  <!-- Language identification
-
-       This example update chain identifies the language of the incoming
-       documents using the langid contrib. The detected language is
-       written to field language_s. No field name mapping is done.
-       The fields used for detection are text, title, subject and description,
-       making this example suitable for detecting languages form full-text
-       rich documents injected via ExtractingRequestHandler.
-       See more about langId at http://wiki.apache.org/solr/LanguageDetection
-    -->
-  <!--
-   <updateRequestProcessorChain name="langid">
-     <processor class="org.apache.solr.update.processor.TikaLanguageIdentifierUpdateProcessorFactory">
-       <str name="langid.fl">text,title,subject,description</str>
-       <str name="langid.langField">language_s</str>
-       <str name="langid.fallback">en</str>
-     </processor>
-     <processor class="solr.LogUpdateProcessorFactory" />
-     <processor class="solr.RunUpdateProcessorFactory" />
-   </updateRequestProcessorChain>
-  -->
-
-  <!-- Script update processor
-
-    This example hooks in an update processor implemented using JavaScript.
-
-    See more about the script update processor at http://wiki.apache.org/solr/ScriptUpdateProcessor
-  -->
-  <!--
-    <updateRequestProcessorChain name="script">
-      <processor class="solr.StatelessScriptUpdateProcessorFactory">
-        <str name="script">update-script.js</str>
-        <lst name="params">
-          <str name="config_param">example config parameter</str>
-        </lst>
-      </processor>
-      <processor class="solr.RunUpdateProcessorFactory" />
-    </updateRequestProcessorChain>
-  -->
-
-  <!-- Response Writers
-
-       http://wiki.apache.org/solr/QueryResponseWriter
-
-       Request responses will be written using the writer specified by
-       the 'wt' request parameter matching the name of a registered
-       writer.
-
-       The "default" writer is the default and will be used if 'wt' is
-       not specified in the request.
-    -->
-  <!-- The following response writers are implicitly configured unless
-       overridden...
-    -->
-  <!--
-     <queryResponseWriter name="xml"
-                          default="true"
-                          class="solr.XMLResponseWriter" />
-     <queryResponseWriter name="json" class="solr.JSONResponseWriter"/>
-     <queryResponseWriter name="python" class="solr.PythonResponseWriter"/>
-     <queryResponseWriter name="ruby" class="solr.RubyResponseWriter"/>
-     <queryResponseWriter name="php" class="solr.PHPResponseWriter"/>
-     <queryResponseWriter name="phps" class="solr.PHPSerializedResponseWriter"/>
-     <queryResponseWriter name="csv" class="solr.CSVResponseWriter"/>
-     <queryResponseWriter name="schema.xml" class="solr.SchemaXmlResponseWriter"/>
-    -->
-
-  <queryResponseWriter name="json" class="solr.JSONResponseWriter">
-    <!-- For the purposes of the tutorial, JSON responses are written as
-     plain text so that they are easy to read in *any* browser.
-     If you expect a MIME type of "application/json" just remove this override.
-    -->
-    <str name="content-type">text/plain; charset=UTF-8</str>
-  </queryResponseWriter>
-
-  <!--
-     Custom response writers can be declared as needed...
-    -->
-  <queryResponseWriter name="velocity" class="solr.VelocityResponseWriter" startup="lazy">
-    <str name="template.base.dir">${velocity.template.base.dir:}</str>
-    <str name="solr.resource.loader.enabled">${velocity.solr.resource.loader.enabled:true}</str>
-    <str name="params.resource.loader.enabled">${velocity.params.resource.loader.enabled:false}</str>
-  </queryResponseWriter>
-
-  <!-- XSLT response writer transforms the XML output by any xslt file found
-       in Solr's conf/xslt directory.  Changes to xslt files are checked for
-       every xsltCacheLifetimeSeconds.
-    -->
-  <queryResponseWriter name="xslt" class="solr.XSLTResponseWriter">
-    <int name="xsltCacheLifetimeSeconds">5</int>
-  </queryResponseWriter>
-
-  <!-- Query Parsers
-
-       https://lucene.apache.org/solr/guide/query-syntax-and-parsing.html
-
-       Multiple QParserPlugins can be registered by name, and then
-       used in either the "defType" param for the QueryComponent (used
-       by SearchHandler) or in LocalParams
-    -->
-  <!-- example of registering a query parser -->
-  <!--
-     <queryParser name="myparser" class="com.mycompany.MyQParserPlugin"/>
-    -->
-
-  <!-- Function Parsers
-
-       http://wiki.apache.org/solr/FunctionQuery
-
-       Multiple ValueSourceParsers can be registered by name, and then
-       used as function names when using the "func" QParser.
-    -->
-  <!-- example of registering a custom function parser  -->
-  <!--
-     <valueSourceParser name="myfunc"
-                        class="com.mycompany.MyValueSourceParser" />
-    -->
-
-
-  <!-- Document Transformers
-       http://wiki.apache.org/solr/DocTransformers
-    -->
-  <!--
-     Could be something like:
-     <transformer name="db" class="com.mycompany.LoadFromDatabaseTransformer" >
-       <int name="connection">jdbc://....</int>
-     </transformer>
-
-     To add a constant value to all docs, use:
-     <transformer name="mytrans2" class="org.apache.solr.response.transform.ValueAugmenterFactory" >
-       <int name="value">5</int>
-     </transformer>
-
-     If you want the user to still be able to change it with _value:something_ use this:
-     <transformer name="mytrans3" class="org.apache.solr.response.transform.ValueAugmenterFactory" >
-       <double name="defaultValue">5</double>
-     </transformer>
-
-      If you are using the QueryElevationComponent, you may wish to mark documents that get boosted.  The
-      EditorialMarkerFactory will do exactly that:
-     <transformer name="qecBooster" class="org.apache.solr.response.transform.EditorialMarkerFactory" />
-    -->
-</config>
+<?xml version="1.0" encoding="UTF-8" ?>
+<!--
+ Licensed to the Apache Software Foundation (ASF) under one or more
+ contributor license agreements.  See the NOTICE file distributed with
+ this work for additional information regarding copyright ownership.
+ The ASF licenses this file to You under the Apache License, Version 2.0
+ (the "License"); you may not use this file except in compliance with
+ the License.  You may obtain a copy of the License at
+
+     http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+
+<!--
+     For more details about configurations options that may appear in
+     this file, see https://solr.apache.org/guide/solr/latest/configuration-guide/configuring-solrconfig-xml.html.
+-->
+<config>
+  <!-- In all configuration below, a prefix of "solr." for class names
+       is an alias that causes solr to search appropriate packages,
+       including org.apache.solr.(search|update|request|core|analysis)
+
+       You may also specify a fully qualified Java classname if you
+       have your own custom plugins.
+    -->
+
+  <!-- Controls what version of Lucene various components of Solr
+       adhere to.  Generally, you want to use the latest version to
+       get all bug fixes and improvements. It is highly recommended
+       that you fully re-index after changing this setting as it can
+       affect both how text is indexed and queried.
+  -->
+  <luceneMatchVersion>9.7</luceneMatchVersion>
+
+  <!-- <lib/> directives can be used to instruct Solr to load any Jars
+       identified and use them to resolve any "plugins" specified in
+       your solrconfig.xml or schema.xml (ie: Analyzers, Request
+       Handlers, etc...).
+
+       All directories and paths are resolved relative to the
+       instanceDir.
+
+       Please note that <lib/> directives are processed in the order
+       that they appear in your solrconfig.xml file, and are "stacked"
+       on top of each other when building a ClassLoader - so if you have
+       plugin jars with dependencies on other jars, the "lower level"
+       dependency jars should be loaded first.
+
+       If a "./lib" directory exists in your instanceDir, all files
+       found in it are included as if you had used the following
+       syntax...
+
+              <lib dir="./lib" />
+    -->
+
+  <!-- A 'dir' option by itself adds any files found in the directory
+       to the classpath, this is useful for including all jars in a
+       directory.
+
+       When a 'regex' is specified in addition to a 'dir', only the
+       files in that directory which completely match the regex
+       (anchored on both ends) will be included.
+
+       If a 'dir' option (with or without a regex) is used and nothing
+       is found that matches, a warning will be logged.
+
+       The example below can be used to load a Solr Module along
+       with their external dependencies.
+    -->
+    <!-- <lib dir="${solr.install.dir:../../../..}/modules/ltr/lib" regex=".*\.jar" /> -->
+
+  <!-- an exact 'path' can be used instead of a 'dir' to specify a
+       specific jar file.  This will cause a serious error to be logged
+       if it can't be loaded.
+    -->
+  <!--
+     <lib path="../a-jar-that-does-not-exist.jar" />
+  -->
+
+  <!-- Data Directory
+
+       Used to specify an alternate directory to hold all index data
+       other than the default ./data under the Solr home.  If
+       replication is in use, this should match the replication
+       configuration.
+    -->
+  <dataDir>${solr.data.dir:}</dataDir>
+
+
+  <!-- The DirectoryFactory to use for indexes.
+
+       solr.StandardDirectoryFactory is filesystem
+       based and tries to pick the best implementation for the current
+       JVM and platform.  solr.NRTCachingDirectoryFactory, the default,
+       wraps solr.StandardDirectoryFactory and caches small files in memory
+       for better NRT performance.
+
+       One can force a particular implementation via solr.MMapDirectoryFactory
+       or solr.NIOFSDirectoryFactory.
+
+       solr.RAMDirectoryFactory is memory based and not persistent.
+    -->
+  <directoryFactory name="DirectoryFactory"
+                    class="${solr.directoryFactory:solr.NRTCachingDirectoryFactory}"/>
+
+  <!-- The CodecFactory for defining the format of the inverted index.
+       The default implementation is SchemaCodecFactory, which is the official Lucene
+       index format, but hooks into the schema to provide per-field customization of
+       the postings lists and per-document values in the fieldType element
+       (postingsFormat/docValuesFormat). Note that most of the alternative implementations
+       are experimental, so if you choose to customize the index format, it's a good
+       idea to convert back to the official format e.g. via IndexWriter.addIndexes(IndexReader)
+       before upgrading to a newer version to avoid unnecessary reindexing.
+       A "compressionMode" string element can be added to <codecFactory> to choose
+       between the existing compression modes in the default codec: "BEST_SPEED" (default)
+       or "BEST_COMPRESSION".
+  -->
+  <codecFactory class="solr.SchemaCodecFactory"/>
+
+  <!-- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+       Index Config - These settings control low-level behavior of indexing
+       Most example settings here show the default value, but are commented
+       out, to more easily see where customizations have been made.
+
+       Note: This replaces <indexDefaults> and <mainIndex> from older versions
+       ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -->
+  <indexConfig>
+    <!-- maxFieldLength was removed in 4.0. To get similar behavior, include a
+         LimitTokenCountFilterFactory in your fieldType definition. E.g.
+     <filter class="solr.LimitTokenCountFilterFactory" maxTokenCount="10000"/>
+    -->
+    <!-- Maximum time to wait for a write lock (ms) for an IndexWriter. Default: 1000 -->
+    <!-- <writeLockTimeout>1000</writeLockTimeout>  -->
+
+    <!-- Expert: Enabling compound file will use fewer files for the index,
+         using fewer file descriptors at the expense of a performance decrease.
+         Default in Lucene is "true". Default in Solr is "false" (since 3.6) -->
+    <!-- <useCompoundFile>false</useCompoundFile> -->
+
+    <!-- ramBufferSizeMB sets the amount of RAM that may be used by Lucene
+         indexing for buffering added documents and deletions before they are
+         flushed to the Directory.
+         maxBufferedDocs sets a limit on the number of documents buffered
+         before flushing.
+         If both ramBufferSizeMB and maxBufferedDocs are set, then
+         Lucene will flush based on whichever limit is hit first.  -->
+    <!-- <ramBufferSizeMB>100</ramBufferSizeMB> -->
+    <!-- <maxBufferedDocs>1000</maxBufferedDocs> -->
+
+    <!-- Expert: ramPerThreadHardLimitMB sets the maximum amount of RAM that can be consumed
+         per thread before they are flushed. When limit is exceeded, this triggers a forced
+         flush even if ramBufferSizeMB has not been exceeded.
+         This is a safety limit to prevent Lucene's DocumentsWriterPerThread from exhausting its
+         address space due to its internal 32-bit signed-integer-based memory addressing.
+         The specified value should be greater than 0 and less than 2048MB. When not specified,
+         Solr uses Lucene's default value 1945. -->
+    <!-- <ramPerThreadHardLimitMB>1945</ramPerThreadHardLimitMB> -->
+
+    <!-- Expert: Merge Policy
+         The Merge Policy in Lucene controls how merging of segments is done.
+         The default since Solr/Lucene 3.3 is TieredMergePolicy.
+         The default since Lucene 2.3 was the LogByteSizeMergePolicy,
+         Even older versions of Lucene used LogDocMergePolicy.
+      -->
+    <!--
+        <mergePolicyFactory class="org.apache.solr.index.TieredMergePolicyFactory">
+          <int name="maxMergeAtOnce">10</int>
+          <int name="segmentsPerTier">10</int>
+          <double name="noCFSRatio">0.1</double>
+        </mergePolicyFactory>
+      -->
+
+    <!-- Expert: Merge Scheduler
+         The Merge Scheduler in Lucene controls how merges are
+         performed.  The ConcurrentMergeScheduler (Lucene 2.3 default)
+         can perform merges in the background using separate threads.
+         The SerialMergeScheduler (Lucene 2.2 default) does not.
+     -->
+    <!--
+       <mergeScheduler class="org.apache.lucene.index.ConcurrentMergeScheduler"/>
+       -->
+
+    <!-- LockFactory
+
+         This option specifies which Lucene LockFactory implementation
+         to use.
+
+         single = SingleInstanceLockFactory - suggested for a
+                  read-only index or when there is no possibility of
+                  another process trying to modify the index.
+         native = NativeFSLockFactory - uses OS native file locking.
+                  Do not use when multiple solr webapps in the same
+                  JVM are attempting to share a single index.
+         simple = SimpleFSLockFactory  - uses a plain file for locking
+
+         Defaults: 'native' is default for Solr3.6 and later, otherwise
+                   'simple' is the default
+
+         More details on the nuances of each LockFactory...
+         https://cwiki.apache.org/confluence/display/lucene/AvailableLockFactories
+    -->
+    <lockType>${solr.lock.type:native}</lockType>
+
+    <!-- Commit Deletion Policy
+         Custom deletion policies can be specified here. The class must
+         implement org.apache.lucene.index.IndexDeletionPolicy.
+
+         The default Solr IndexDeletionPolicy implementation supports
+         deleting index commit points on number of commits, age of
+         commit point and optimized status.
+
+         The latest commit point should always be preserved regardless
+         of the criteria.
+    -->
+    <!--
+    <deletionPolicy class="solr.SolrDeletionPolicy">
+    -->
+    <!-- The number of commit points to be kept -->
+    <!-- <str name="maxCommitsToKeep">1</str> -->
+    <!-- The number of optimized commit points to be kept -->
+    <!-- <str name="maxOptimizedCommitsToKeep">0</str> -->
+    <!--
+        Delete all commit points once they have reached the given age.
+        Supports DateMathParser syntax e.g.
+      -->
+    <!--
+       <str name="maxCommitAge">30MINUTES</str>
+       <str name="maxCommitAge">1DAY</str>
+    -->
+    <!--
+    </deletionPolicy>
+    -->
+
+    <!-- Lucene Infostream
+
+         To aid in advanced debugging, Lucene provides an "InfoStream"
+         of detailed information when indexing.
+
+         Setting the value to true will instruct the underlying Lucene
+         IndexWriter to write its debugging info to the specified file
+      -->
+    <!-- <infoStream file="INFOSTREAM.txt">false</infoStream> -->
+  </indexConfig>
+
+  <!-- The default high-performance update handler -->
+  <updateHandler class="solr.DirectUpdateHandler2">
+
+    <!-- Enables a transaction log, used for real-time get, durability, and
+         SolrCloud replica recovery.  The log can grow as big as
+         uncommitted changes to the index, so use of a hard autoCommit
+         is recommended (see below).
+         "dir" - the target directory for transaction logs, defaults to the
+                solr data directory.
+         "numVersionBuckets" - sets the number of buckets used to keep
+                track of max version values when checking for re-ordered
+                updates; increase this value to reduce the cost of
+                synchronizing access to version buckets during high-volume
+                indexing, this requires 8 bytes (long) * numVersionBuckets
+                of heap space per Solr core.
+    -->
+    <updateLog>
+      <str name="dir">${solr.ulog.dir:}</str>
+      <int name="numVersionBuckets">${solr.ulog.numVersionBuckets:65536}</int>
+    </updateLog>
+
+    <!-- AutoCommit
+
+         Perform a hard commit automatically under certain conditions.
+         Instead of enabling autoCommit, consider using "commitWithin"
+         when adding documents.
+
+         https://solr.apache.org/guide/solr/latest/indexing-guide/indexing-with-update-handlers.html
+
+         maxDocs - Maximum number of documents to add since the last
+                   commit before automatically triggering a new commit.
+
+         maxTime - Maximum amount of time in ms that is allowed to pass
+                   since a document was added before automatically
+                   triggering a new commit.
+         openSearcher - if false, the commit causes recent index changes
+           to be flushed to stable storage, but does not cause a new
+           searcher to be opened to make those changes visible.
+
+         If the updateLog is enabled, then it's highly recommended to
+         have some sort of hard autoCommit to limit the log size.
+      -->
+    <autoCommit>
+      <maxTime>${solr.autoCommit.maxTime:15000}</maxTime>
+      <openSearcher>false</openSearcher>
+    </autoCommit>
+
+    <!-- softAutoCommit is like autoCommit except it causes a
+         'soft' commit which only ensures that changes are visible
+         but does not ensure that data is synced to disk.  This is
+         faster and more near-realtime friendly than a hard commit.
+      -->
+
+    <autoSoftCommit>
+      <maxTime>${solr.autoSoftCommit.maxTime:-1}</maxTime>
+    </autoSoftCommit>
+
+    <!-- Update Related Event Listeners
+
+         Various IndexWriter related events can trigger Listeners to
+         take actions.
+
+         postCommit - fired after every commit or optimize command
+         postOptimize - fired after every optimize command
+      -->
+
+  </updateHandler>
+
+  <!-- IndexReaderFactory
+
+       Use the following format to specify a custom IndexReaderFactory,
+       which allows for alternate IndexReader implementations.
+
+       ** Experimental Feature **
+
+       Please note - Using a custom IndexReaderFactory may prevent
+       certain other features from working. The API to
+       IndexReaderFactory may change without warning or may even be
+       removed from future releases if the problems cannot be
+       resolved.
+
+
+       ** Features that may not work with custom IndexReaderFactory **
+
+       The ReplicationHandler assumes a disk-resident index. Using a
+       custom IndexReader implementation may cause incompatibility
+       with ReplicationHandler and may cause replication to not work
+       correctly. See SOLR-1366 for details.
+
+    -->
+  <!--
+  <indexReaderFactory name="IndexReaderFactory" class="package.class">
+    <str name="someArg">Some Value</str>
+  </indexReaderFactory >
+  -->
+
+  <!-- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+       Query section - these settings control query time things like caches
+       ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -->
+  <query>
+
+    <!-- Maximum number of clauses allowed when parsing a boolean query string.
+
+         This limit only impacts boolean queries specified by a user as part of a query string,
+         and provides per-collection controls on how complex user specified boolean queries can
+         be.  Query strings that specify more clauses than this will result in an error.
+
+         If this per-collection limit is greater than the global `maxBooleanClauses` limit
+         specified in `solr.xml`, it will have no effect, as that setting also limits the size
+         of user specified boolean queries.
+      -->
+    <maxBooleanClauses>${solr.max.booleanClauses:1024}</maxBooleanClauses>
+
+    <!-- Solr Internal Query Caches
+         Starting with Solr 9.0 the default cache implementation used is CaffeineCache.
+    -->
+
+    <!-- Filter Cache
+
+         Cache used by SolrIndexSearcher for filters (DocSets),
+         unordered sets of *all* documents that match a query.  When a
+         new searcher is opened, its caches may be prepopulated or
+         "autowarmed" using data from caches in the old searcher.
+         autowarmCount is the number of items to prepopulate. For
+         CaffeineCache, the autowarmed items will be the most recently
+         accessed items.
+
+         Parameters:
+           class - the SolrCache implementation (CaffeineCache by default)
+           size - the maximum number of entries in the cache
+           initialSize - the initial capacity (number of entries) of
+               the cache.  (see java.util.HashMap)
+           autowarmCount - the number of entries to prepopulate from
+               an old cache.
+           maxRamMB - the maximum amount of RAM (in MB) that this cache is allowed
+                      to occupy. Note that when this option is specified, the size
+                      and initialSize parameters are ignored.
+      -->
+    <filterCache size="512"
+                 initialSize="512"
+                 autowarmCount="0"/>
+
+    <!-- Query Result Cache
+
+         Caches results of searches - ordered lists of document ids
+         (DocList) based on a query, a sort, and the range of documents requested.
+         Additional supported parameter by CaffeineCache:
+            maxRamMB - the maximum amount of RAM (in MB) that this cache is allowed
+                       to occupy
+      -->
+    <queryResultCache size="512"
+                      initialSize="512"
+                      autowarmCount="0"/>
+
+    <!-- Document Cache
+
+         Caches Lucene Document objects (the stored fields for each
+         document).  Since Lucene internal document ids are transient,
+         this cache will not be autowarmed.
+      -->
+    <documentCache size="512"
+                   initialSize="512"
+                   autowarmCount="0"/>
+
+    <!-- custom cache currently used by block join -->
+    <cache name="perSegFilter"
+           class="solr.CaffeineCache"
+           size="10"
+           initialSize="0"
+           autowarmCount="10"
+           regenerator="solr.NoOpRegenerator" />
+
+    <!-- Field Value Cache
+
+         Cache used to hold field values that are quickly accessible
+         by document id.  The fieldValueCache is created by default
+         even if not configured here.
+      -->
+    <!--
+       <fieldValueCache size="512"
+                        autowarmCount="128"
+                        />
+      -->
+
+    <!-- Custom Cache
+
+         Example of a generic cache.  These caches may be accessed by
+         name through SolrIndexSearcher.getCache(),cacheLookup(), and
+         cacheInsert().  The purpose is to enable easy caching of
+         user/application level data.  The regenerator argument should
+         be specified as an implementation of solr.CacheRegenerator
+         if autowarming is desired.
+      -->
+    <!--
+       <cache name="myUserCache"
+              class="solr.CaffeineCache"
+              size="4096"
+              initialSize="1024"
+              autowarmCount="1024"
+              regenerator="com.mycompany.MyRegenerator"
+              />
+      -->
+
+
+    <!-- Lazy Field Loading
+
+         If true, stored fields that are not requested will be loaded
+         lazily.  This can result in a significant speed improvement
+         if the usual case is to not load all stored fields,
+         especially if the skipped fields are large compressed text
+         fields.
+    -->
+    <enableLazyFieldLoading>true</enableLazyFieldLoading>
+
+    <!-- Use Filter For Sorted Query
+
+         A possible optimization that attempts to use a filter to
+         satisfy a search.  If the requested sort does not include
+         score, then the filterCache will be checked for a filter
+         matching the query. If found, the filter will be used as the
+         source of document ids, and then the sort will be applied to
+         that.
+
+         For most situations, this will not be useful unless you
+         frequently get the same search repeatedly with different sort
+         options, and none of them ever use "score"
+      -->
+    <!--
+       <useFilterForSortedQuery>true</useFilterForSortedQuery>
+      -->
+
+    <!-- Result Window Size
+
+         An optimization for use with the queryResultCache.  When a search
+         is requested, a superset of the requested number of document ids
+         are collected.  For example, if a search for a particular query
+         requests matching documents 10 through 19, and queryResultWindowSize is 50,
+         then documents 0 through 49 will be collected and cached.  Any further
+         requests in that range can be satisfied via the cache.
+      -->
+    <queryResultWindowSize>20</queryResultWindowSize>
+
+    <!-- Maximum number of documents to cache for any entry in the
+         queryResultCache.
+      -->
+    <queryResultMaxDocsCached>200</queryResultMaxDocsCached>
+
+  <!-- Use Filter For Sorted Query
+
+   A possible optimization that attempts to use a filter to
+   satisfy a search.  If the requested sort does not include
+   score, then the filterCache will be checked for a filter
+   matching the query. If found, the filter will be used as the
+   source of document ids, and then the sort will be applied to
+   that.
+
+   For most situations, this will not be useful unless you
+   frequently get the same search repeatedly with different sort
+   options, and none of them ever use "score"
+-->
+    <!--
+       <useFilterForSortedQuery>true</useFilterForSortedQuery>
+      -->
+
+    <!-- Query Related Event Listeners
+
+         Various IndexSearcher related events can trigger Listeners to
+         take actions.
+
+         newSearcher - fired whenever a new searcher is being prepared
+         and there is a current searcher handling requests (aka
+         registered).  It can be used to prime certain caches to
+         prevent long request times for certain requests.
+
+         firstSearcher - fired whenever a new searcher is being
+         prepared but there is no current registered searcher to handle
+         requests or to gain autowarming data from.
+
+
+      -->
+    <!-- QuerySenderListener takes an array of NamedList and executes a
+         local query request for each NamedList in sequence.
+      -->
+    <listener event="newSearcher" class="solr.QuerySenderListener">
+      <arr name="queries">
+        <!--
+           <lst><str name="q">solr</str><str name="sort">price asc</str></lst>
+           <lst><str name="q">rocks</str><str name="sort">weight asc</str></lst>
+          -->
+      </arr>
+    </listener>
+    <listener event="firstSearcher" class="solr.QuerySenderListener">
+      <arr name="queries">
+        <!--
+        <lst>
+          <str name="q">static firstSearcher warming in solrconfig.xml</str>
+        </lst>
+        -->
+      </arr>
+    </listener>
+
+    <!-- Use Cold Searcher
+
+         If a search request comes in and there is no current
+         registered searcher, then immediately register the still
+         warming searcher and use it.  If "false" then all requests
+         will block until the first searcher is done warming.
+      -->
+    <useColdSearcher>false</useColdSearcher>
+
+  </query>
+
+  <!-- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+     Circuit Breaker Section - This section consists of configurations for
+     circuit breakers
+     ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -->
+  <!-- Circuit breakers are designed to allow stability and predictable query
+     execution. They prevent operations that can take down the node and cause
+     noisy neighbour issues.
+
+     The CircuitBreakerManager is the default manager for all circuit breakers.
+     The enabled flag here controls the activation/deactivation of all circuit
+     breakers specified within.
+  -->
+  <circuitBreaker class="solr.CircuitBreakerManager" enabled="true">
+    <!-- Memory Circuit Breaker
+
+     Specific configuration for max JVM heap usage circuit breaker. This configuration defines
+     whether the circuit breaker is enabled and the threshold percentage of maximum heap allocated
+     beyond which queries will be rejected until the current JVM usage goes below the threshold.
+     The valid range for this value is 50-95.
+
+     Consider a scenario where the max heap allocated is 4 GB and memThreshold is defined as 75.
+     Threshold JVM usage will be 4 * 0.75 = 3 GB. It's generally a good idea to keep this value
+     between 75 - 80% of maximum heap allocated.
+
+     If, at any point, the current JVM heap usage goes above 3 GB, queries will be rejected until
+     the heap usage goes below 3 GB again. If you see queries getting rejected with 503 error code,
+     check for "Circuit Breakers tripped" in logs and the corresponding error message should tell
+     you what transpired (if the failure was caused by tripped circuit breakers).
+    -->
+    <!--
+    <str name="memEnabled">true</str>
+    <str name="memThreshold">75</str>
+    -->
+
+    <!-- CPU Circuit Breaker Configuration
+
+     Specific configuration for CPU utilization based circuit breaker. This configuration defines
+     whether the circuit breaker is enabled and the average load over the last minute at which the
+     circuit breaker should start rejecting queries.
+    -->
+    <!--
+    <str name="cpuEnabled">true</str>
+    <str name="cpuThreshold">75</str>
+    -->
+  </circuitBreaker>
+
+  <!-- Request Dispatcher
+
+       This section contains instructions for how the SolrDispatchFilter
+       should behave when processing requests for this SolrCore.
+
+    -->
+  <requestDispatcher>
+    <!-- Request Parsing
+
+         These settings indicate how Solr Requests may be parsed, and
+         what restrictions may be placed on the ContentStreams from
+         those requests
+
+         multipartUploadLimitInKB - specifies the max size (in KiB) of
+         Multipart File Uploads that Solr will allow in a Request.
+
+         formdataUploadLimitInKB - specifies the max size (in KiB) of
+         form data (application/x-www-form-urlencoded) sent via
+         POST. You can use POST to pass request parameters not
+         fitting into the URL.
+
+         addHttpRequestToContext - if set to true, it will instruct
+         the requestParsers to include the original HttpServletRequest
+         object in the context map of the SolrQueryRequest under the
+         key "httpRequest". It will not be used by any of the existing
+         Solr components, but may be useful when developing custom
+         plugins.
+
+    <requestParsers multipartUploadLimitInKB="-1"
+                    formdataUploadLimitInKB="-1"
+                    addHttpRequestToContext="false"/>
+      -->
+
+    <!-- HTTP Caching
+
+         Set HTTP caching related parameters (for proxy caches and clients).
+
+         The options below instruct Solr not to output any HTTP Caching
+         related headers
+      -->
+    <httpCaching never304="true" />
+    <!-- If you include a <cacheControl> directive, it will be used to
+         generate a Cache-Control header (as well as an Expires header
+         if the value contains "max-age=")
+
+         By default, no Cache-Control header is generated.
+
+         You can use the <cacheControl> option even if you have set
+         never304="true"
+      -->
+    <!--
+       <httpCaching never304="true" >
+         <cacheControl>max-age=30, public</cacheControl>
+       </httpCaching>
+      -->
+    <!-- To enable Solr to respond with automatically generated HTTP
+         Caching headers, and to response to Cache Validation requests
+         correctly, set the value of never304="false"
+
+         This will cause Solr to generate Last-Modified and ETag
+         headers based on the properties of the Index.
+
+         The following options can also be specified to affect the
+         values of these headers...
+
+         lastModFrom - the default value is "openTime" which means the
+         Last-Modified value (and validation against If-Modified-Since
+         requests) will all be relative to when the current Searcher
+         was opened.  You can change it to lastModFrom="dirLastMod" if
+         you want the value to exactly correspond to when the physical
+         index was last modified.
+
+         etagSeed="..." is an option you can change to force the ETag
+         header (and validation against If-None-Match requests) to be
+         different even if the index has not changed (ie: when making
+         significant changes to your config file)
+
+         (lastModFrom and etagSeed are both ignored if you use
+         the never304="true" option)
+      -->
+    <!--
+       <httpCaching lastModFrom="openTime"
+                    etagSeed="Solr">
+         <cacheControl>max-age=30, public</cacheControl>
+       </httpCaching>
+      -->
+  </requestDispatcher>
+
+  <!-- Request Handlers
+
+       https://solr.apache.org/guide/solr/latest/configuration-guide/requesthandlers-searchcomponents.html
+
+       Incoming queries will be dispatched to a specific handler by name based on the path specified in the request.
+
+       All handlers (Search Handlers, Update Request Handlers, and other specialized types) can have default parameters (defaults, appends and invariants).
+
+       Search Handlers can also (append, prepend or even replace) default or defined Search Components.
+
+       Update Request Handlers can leverage Update Request Processors to pre-process documents after they are loaded
+       and before they are indexed/stored.
+
+       Not all Request Handlers are defined in the solrconfig.xml, many are implicit.
+    -->
+
+  <!-- Primary search handler, expected by most clients, examples and UI frameworks -->
+  <requestHandler name="/select" class="solr.SearchHandler">
+    <lst name="defaults">
+      <str name="echoParams">explicit</str>
+      <int name="rows">10</int>
+      <!--
+        This boosting configuration has been
+        - first introduced in 2015, see https://github.com/IQSS/dataverse/issues/1928#issuecomment-91651853,
+        - been re-introduced in 2018 for Solr 7.2.1 update, see https://github.com/IQSS/dataverse/issues/4158,
+        - and finally evolved to the current state later in 2018 https://github.com/IQSS/dataverse/issues/4938
+          (merged with https://github.com/IQSS/dataverse/commit/3843e5366845d55c327cdb252dd9b4e4125b9b88)
+                
+        Since then, this has not been touched again (2021-12-21).
+      -->
+      <str name="defType">edismax</str>
+      <float name="tie">0.075</float>
+      <str name="qf">
+          dvName^400
+          authorName^180
+          dvSubject^190
+          dvDescription^180
+          dvAffiliation^170
+          title^130
+          subject^120
+          keyword^110
+          topicClassValue^100
+          dsDescriptionValue^90
+          authorAffiliation^80
+          publicationCitation^60
+          producerName^50
+          fileName^30
+          fileDescription^30
+          variableLabel^20
+          variableName^10
+          _text_^1.0
+      </str>
+      <str name="pf">
+          dvName^200
+          authorName^100
+          dvSubject^100
+          dvDescription^100
+          dvAffiliation^100
+          title^75
+          subject^75
+          keyword^75
+          topicClassValue^75
+          dsDescriptionValue^75
+          authorAffiliation^75
+          publicationCitation^75
+          producerName^75
+      </str>
+      <!-- Even though this number is huge it only seems to apply a boost of ~1.5x to final result -MAD 4.9.3 -->
+      <str name="bq">
+          isHarvested:false^25000
+      </str>
+    </lst>
+  </requestHandler>
+
+  <!-- A request handler that returns indented JSON by default -->
+  <requestHandler name="/query" class="solr.SearchHandler">
+    <lst name="defaults">
+      <str name="echoParams">explicit</str>
+      <str name="wt">json</str>
+      <str name="indent">true</str>
+    </lst>
+  </requestHandler>
+
+  <!-- Shared parameters for multiple Request Handlers -->
+  <initParams path="/update/**,/query,/select,/spell">
+    <lst name="defaults">
+      <str name="df">_text_</str>
+    </lst>
+  </initParams>
+
+  <!-- Spell Check
+
+       The spell check component can return a list of alternative spelling
+       suggestions.
+
+       https://solr.apache.org/guide/solr/latest/query-guide/spell-checking.html
+    -->
+  <searchComponent name="spellcheck" class="solr.SpellCheckComponent">
+
+    <str name="queryAnalyzerFieldType">text_general</str>
+
+    <!-- Multiple "Spell Checkers" can be declared and used by this
+         component
+      -->
+
+    <!-- a spellchecker built from a field of the main index -->
+    <lst name="spellchecker">
+      <str name="name">default</str>
+      <str name="field">_text_</str>
+      <str name="classname">solr.DirectSolrSpellChecker</str>
+      <!-- the spellcheck distance measure used, the default is the internal levenshtein -->
+      <str name="distanceMeasure">internal</str>
+      <!-- minimum accuracy needed to be considered a valid spellcheck suggestion -->
+      <float name="accuracy">0.5</float>
+      <!-- the maximum #edits we consider when enumerating terms: can be 1 or 2 -->
+      <int name="maxEdits">2</int>
+      <!-- the minimum shared prefix when enumerating terms -->
+      <int name="minPrefix">1</int>
+      <!-- maximum number of inspections per result. -->
+      <int name="maxInspections">5</int>
+      <!-- minimum length of a query term to be considered for correction -->
+      <int name="minQueryLength">4</int>
+      <!-- maximum threshold of documents a query term can appear to be considered for correction -->
+      <float name="maxQueryFrequency">0.01</float>
+      <!-- uncomment this to require suggestions to occur in 1% of the documents
+        <float name="thresholdTokenFrequency">.01</float>
+      -->
+    </lst>
+
+    <!-- a spellchecker that can break or combine words.  See "/spell" handler below for usage -->
+    <!--
+    <lst name="spellchecker">
+      <str name="name">wordbreak</str>
+      <str name="classname">solr.WordBreakSolrSpellChecker</str>
+      <str name="field">name</str>
+      <str name="combineWords">true</str>
+      <str name="breakWords">true</str>
+      <int name="maxChanges">10</int>
+    </lst>
+    -->
+  </searchComponent>
+
+  <!-- A request handler for demonstrating the spellcheck component.
+
+       NOTE: This is purely as an example.  The whole purpose of the
+       SpellCheckComponent is to hook it into the request handler that
+       handles your normal user queries so that a separate request is
+       not needed to get suggestions.
+
+       IN OTHER WORDS, THERE IS A REALLY GOOD CHANCE THE SETUP BELOW IS
+       NOT WHAT YOU WANT FOR YOUR PRODUCTION SYSTEM!
+
+       See https://solr.apache.org/guide/solr/latest/query-guide/spell-checking.html for details
+       on the request parameters.
+    -->
+  <!--
+  <requestHandler name="/spell" class="solr.SearchHandler" startup="lazy">
+    <lst name="defaults">
+    -->
+      <!-- Solr will use suggestions from both the 'default' spellchecker
+           and from the 'wordbreak' spellchecker and combine them.
+           collations (re-written queries) can include a combination of
+           corrections from both spellcheckers -->
+  <!--
+      <str name="spellcheck.dictionary">default</str>
+      <str name="spellcheck">on</str>
+      <str name="spellcheck.extendedResults">true</str>
+      <str name="spellcheck.count">10</str>
+      <str name="spellcheck.alternativeTermCount">5</str>
+      <str name="spellcheck.maxResultsForSuggest">5</str>
+      <str name="spellcheck.collate">true</str>
+      <str name="spellcheck.collateExtendedResults">true</str>
+      <str name="spellcheck.maxCollationTries">10</str>
+      <str name="spellcheck.maxCollations">5</str>
+    </lst>
+    <arr name="last-components">
+      <str>spellcheck</str>
+    </arr>
+  </requestHandler>
+  -->
+
+  <!-- Highlighting Component
+
+       https://solr.apache.org/guide/solr/latest/query-guide/highlighting.html
+    -->
+  <searchComponent class="solr.HighlightComponent" name="highlight">
+    <!-- note: the hl.method=unified highlighter is not configured here; it's completely configured
+    via parameters.  The below configuration supports hl.method=original and fastVector. -->
+    <highlighting>
+      <!-- Configure the standard fragmenter -->
+      <!-- This could most likely be commented out in the "default" case -->
+      <fragmenter name="gap"
+                  default="true"
+                  class="solr.highlight.GapFragmenter">
+        <lst name="defaults">
+          <int name="hl.fragsize">100</int>
+        </lst>
+      </fragmenter>
+
+      <!-- A regular-expression-based fragmenter
+           (for sentence extraction)
+        -->
+      <fragmenter name="regex"
+                  class="solr.highlight.RegexFragmenter">
+        <lst name="defaults">
+          <!-- slightly smaller fragsizes work better because of slop -->
+          <int name="hl.fragsize">70</int>
+          <!-- allow 50% slop on fragment sizes -->
+          <float name="hl.regex.slop">0.5</float>
+          <!-- a basic sentence pattern -->
+          <str name="hl.regex.pattern">[-\w ,/\n\&quot;&apos;]{20,200}</str>
+        </lst>
+      </fragmenter>
+
+      <!-- Configure the standard formatter -->
+      <formatter name="html"
+                 default="true"
+                 class="solr.highlight.HtmlFormatter">
+        <lst name="defaults">
+          <str name="hl.simple.pre"><![CDATA[<em>]]></str>
+          <str name="hl.simple.post"><![CDATA[</em>]]></str>
+        </lst>
+      </formatter>
+
+      <!-- Configure the standard encoder -->
+      <encoder name="html"
+               class="solr.highlight.HtmlEncoder" />
+
+      <!-- Configure the standard fragListBuilder -->
+      <fragListBuilder name="simple"
+                       class="solr.highlight.SimpleFragListBuilder"/>
+
+      <!-- Configure the single fragListBuilder -->
+      <fragListBuilder name="single"
+                       class="solr.highlight.SingleFragListBuilder"/>
+
+      <!-- Configure the weighted fragListBuilder -->
+      <fragListBuilder name="weighted"
+                       default="true"
+                       class="solr.highlight.WeightedFragListBuilder"/>
+
+      <!-- default tag FragmentsBuilder -->
+      <fragmentsBuilder name="default"
+                        default="true"
+                        class="solr.highlight.ScoreOrderFragmentsBuilder">
+        <!--
+        <lst name="defaults">
+          <str name="hl.multiValuedSeparatorChar">/</str>
+        </lst>
+        -->
+      </fragmentsBuilder>
+
+      <!-- multi-colored tag FragmentsBuilder -->
+      <fragmentsBuilder name="colored"
+                        class="solr.highlight.ScoreOrderFragmentsBuilder">
+        <lst name="defaults">
+          <str name="hl.tag.pre"><![CDATA[
+               <b style="background:yellow">,<b style="background:lawngreen">,
+               <b style="background:aquamarine">,<b style="background:magenta">,
+               <b style="background:palegreen">,<b style="background:coral">,
+               <b style="background:wheat">,<b style="background:khaki">,
+               <b style="background:lime">,<b style="background:deepskyblue">]]></str>
+          <str name="hl.tag.post"><![CDATA[</b>]]></str>
+        </lst>
+      </fragmentsBuilder>
+
+      <boundaryScanner name="default"
+                       default="true"
+                       class="solr.highlight.SimpleBoundaryScanner">
+        <lst name="defaults">
+          <str name="hl.bs.maxScan">10</str>
+          <str name="hl.bs.chars">.,!? &#9;&#10;&#13;</str>
+        </lst>
+      </boundaryScanner>
+
+      <boundaryScanner name="breakIterator"
+                       class="solr.highlight.BreakIteratorBoundaryScanner">
+        <lst name="defaults">
+          <!-- type should be one of CHARACTER, WORD(default), LINE and SENTENCE -->
+          <str name="hl.bs.type">WORD</str>
+          <!-- language and country are used when constructing Locale object.  -->
+          <!-- And the Locale object will be used when getting instance of BreakIterator -->
+          <str name="hl.bs.language">en</str>
+          <str name="hl.bs.country">US</str>
+        </lst>
+      </boundaryScanner>
+    </highlighting>
+  </searchComponent>
+
+  <!-- Update Request Processors
+       https://solr.apache.org/guide/solr/latest/configuration-guide/update-request-processors.html
+
+       Chains or individual Update Request Processor Factories can be declared and referenced
+       to preprocess documents sent to Update Request Handlers.
+    -->
+
+  <!-- Add unknown fields to the schema
+
+       Field type guessing update request processors that will
+       attempt to parse string-typed field values as Booleans, Longs,
+       Doubles, or Dates, and then add schema fields with the guessed
+         field types. Text content will be indexed as "text_general" as
+       well as a copy to a plain string version in *_str.
+       See the updateRequestProcessorChain defined later for the order they are executed in.
+
+       These require that the schema is both managed and mutable, by
+       declaring schemaFactory as ManagedIndexSchemaFactory, with
+       mutable specified as true.
+
+       See https://solr.apache.org/guide/solr/latest/indexing-guide/schemaless-mode.html for further explanation.
+
+    -->
+  <schemaFactory class="ClassicIndexSchemaFactory"/>
+  <updateProcessor class="solr.UUIDUpdateProcessorFactory" name="uuid"/>
+  <updateProcessor class="solr.RemoveBlankFieldUpdateProcessorFactory" name="remove-blank"/>
+  <updateProcessor class="solr.FieldNameMutatingUpdateProcessorFactory" name="field-name-mutating">
+    <str name="pattern">[^\w-\.]</str>
+    <str name="replacement">_</str>
+  </updateProcessor>
+  <updateProcessor class="solr.ParseBooleanFieldUpdateProcessorFactory" name="parse-boolean"/>
+  <updateProcessor class="solr.ParseLongFieldUpdateProcessorFactory" name="parse-long"/>
+  <updateProcessor class="solr.ParseDoubleFieldUpdateProcessorFactory" name="parse-double"/>
+  <updateProcessor class="solr.ParseDateFieldUpdateProcessorFactory" name="parse-date">
+    <arr name="format">
+      <str>yyyy-MM-dd['T'[HH:mm[:ss[.SSS]][z</str>
+      <str>yyyy-MM-dd['T'[HH:mm[:ss[,SSS]][z</str>
+      <str>yyyy-MM-dd HH:mm[:ss[.SSS]][z</str>
+      <str>yyyy-MM-dd HH:mm[:ss[,SSS]][z</str>
+      <str>[EEE, ]dd MMM yyyy HH:mm[:ss] z</str>
+      <str>EEEE, dd-MMM-yy HH:mm:ss z</str>
+      <str>EEE MMM ppd HH:mm:ss [z ]yyyy</str>
+    </arr>
+  </updateProcessor>
+  <updateProcessor class="solr.AddSchemaFieldsUpdateProcessorFactory" name="add-schema-fields">
+    <lst name="typeMapping">
+      <str name="valueClass">java.lang.String</str>
+      <str name="fieldType">text_general</str>
+      <lst name="copyField">
+        <str name="dest">*_str</str>
+        <int name="maxChars">256</int>
+      </lst>
+      <!-- Use as default mapping instead of defaultFieldType -->
+      <bool name="default">true</bool>
+    </lst>
+    <lst name="typeMapping">
+      <str name="valueClass">java.lang.Boolean</str>
+      <str name="fieldType">booleans</str>
+    </lst>
+    <lst name="typeMapping">
+      <str name="valueClass">java.util.Date</str>
+      <str name="fieldType">pdates</str>
+    </lst>
+    <lst name="typeMapping">
+      <str name="valueClass">java.lang.Long</str>
+      <str name="valueClass">java.lang.Integer</str>
+      <str name="fieldType">plongs</str>
+    </lst>
+    <lst name="typeMapping">
+      <str name="valueClass">java.lang.Number</str>
+      <str name="fieldType">pdoubles</str>
+    </lst>
+  </updateProcessor>
+
+  <!-- The update.autoCreateFields property can be turned to false to disable schemaless mode -->
+  <updateRequestProcessorChain name="add-unknown-fields-to-the-schema" default="${update.autoCreateFields:false}"
+           processor="uuid,remove-blank,field-name-mutating,parse-boolean,parse-long,parse-double,parse-date,add-schema-fields">
+    <processor class="solr.LogUpdateProcessorFactory"/>
+    <processor class="solr.DistributedUpdateProcessorFactory"/>
+    <processor class="solr.RunUpdateProcessorFactory"/>
+  </updateRequestProcessorChain>
+
+  <!-- Deduplication
+
+       An example dedup update request processor chain that creates the "id" field
+       on the fly based on the hash code of some other fields.  This
+       example has overwriteDupes set to false since we are using the
+       id field as the signatureField and Solr will maintain
+       uniqueness based on that anyway.
+
+    -->
+  <!--
+     <updateRequestProcessorChain name="dedupe">
+       <processor class="solr.processor.SignatureUpdateProcessorFactory">
+         <bool name="enabled">true</bool>
+         <str name="signatureField">id</str>
+         <str name="fields">name,features,cat</str>
+         <str name="signatureClass">solr.processor.Lookup3Signature</str>
+       </processor>
+       <processor class="solr.LogUpdateProcessorFactory" />
+       <processor class="solr.RunUpdateProcessorFactory" />
+     </updateRequestProcessorChain>
+    -->
+
+  <!-- Response Writers
+
+       https://solr.apache.org/guide/solr/latest/query-guide/response-writers.html
+
+       Request responses will be written using the writer specified by
+       the 'wt' request parameter matching the name of a registered
+       writer.
+
+       The "default" writer is the default and will be used if 'wt' is
+       not specified in the request.
+    -->
+  <!-- The following response writers are implicitly configured unless
+       overridden...
+    -->
+  <!--
+     <queryResponseWriter name="xml"
+                          default="true"
+                          class="solr.XMLResponseWriter" />
+     <queryResponseWriter name="json" class="solr.JSONResponseWriter"/>
+     <queryResponseWriter name="python" class="solr.PythonResponseWriter"/>
+     <queryResponseWriter name="ruby" class="solr.RubyResponseWriter"/>
+     <queryResponseWriter name="php" class="solr.PHPResponseWriter"/>
+     <queryResponseWriter name="phps" class="solr.PHPSerializedResponseWriter"/>
+     <queryResponseWriter name="csv" class="solr.CSVResponseWriter"/>
+     <queryResponseWriter name="schema.xml" class="solr.SchemaXmlResponseWriter"/>
+    -->
+
+  <!-- Overriding the content-type of the response writer.
+       For example, Default content-type of JSON is application/json. This can be overridden to
+       text/plain so that response is easy to read in *any* browser.
+   -->
+  <!--
+     <queryResponseWriter name="json" class="solr.JSONResponseWriter">
+        <str name="content-type">text/plain; charset=UTF-8</str>
+      </queryResponseWriter>
+   -->
+
+  <!-- Query Parsers
+
+       https://solr.apache.org/guide/solr/latest/query-guide/query-syntax-and-parsers.html
+
+       Multiple QParserPlugins can be registered by name, and then
+       used in either the "defType" param for the QueryComponent (used
+       by SearchHandler) or in LocalParams
+    -->
+  <!-- example of registering a query parser -->
+  <!--
+     <queryParser name="myparser" class="com.mycompany.MyQParserPlugin"/>
+    -->
+
+  <!-- Function Parsers
+
+       https://solr.apache.org/guide/solr/latest/query-guide/function-queries.html
+
+       Multiple ValueSourceParsers can be registered by name, and then
+       used as function names when using the "func" QParser.
+    -->
+  <!-- example of registering a custom function parser  -->
+  <!--
+     <valueSourceParser name="myfunc"
+                        class="com.mycompany.MyValueSourceParser" />
+    -->
+
+
+  <!-- Document Transformers
+       https://solr.apache.org/guide/solr/latest/query-guide/document-transformers.html
+    -->
+  <!--
+     Could be something like:
+     <transformer name="db" class="com.mycompany.LoadFromDatabaseTransformer" >
+       <int name="connection">jdbc://....</int>
+     </transformer>
+
+     To add a constant value to all docs, use:
+     <transformer name="mytrans2" class="org.apache.solr.response.transform.ValueAugmenterFactory" >
+       <int name="value">5</int>
+     </transformer>
+
+     If you want the user to still be able to change it with _value:something_ use this:
+     <transformer name="mytrans3" class="org.apache.solr.response.transform.ValueAugmenterFactory" >
+       <double name="defaultValue">5</double>
+     </transformer>
+
+      If you are using the QueryElevationComponent, you may wish to mark documents that get boosted.  The
+      EditorialMarkerFactory will do exactly that:
+     <transformer name="qecBooster" class="org.apache.solr.response.transform.EditorialMarkerFactory" />
+    -->
+</config>
diff --git a/conf/solr/8.11.1/update-fields.sh b/conf/solr/9.3.0/update-fields.sh
similarity index 100%
rename from conf/solr/8.11.1/update-fields.sh
rename to conf/solr/9.3.0/update-fields.sh
diff --git a/conf/vagrant/etc/shibboleth/attribute-map.xml b/conf/vagrant/etc/shibboleth/attribute-map.xml
deleted file mode 100644
index f6386b620f5..00000000000
--- a/conf/vagrant/etc/shibboleth/attribute-map.xml
+++ /dev/null
@@ -1,141 +0,0 @@
-<Attributes xmlns="urn:mace:shibboleth:2.0:attribute-map" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
-
-    <!--
-    The mappings are a mix of SAML 1.1 and SAML 2.0 attribute names agreed to within the Shibboleth
-    community. The non-OID URNs are SAML 1.1 names and most of the OIDs are SAML 2.0 names, with a
-    few exceptions for newer attributes where the name is the same for both versions. You will
-    usually want to uncomment or map the names for both SAML versions as a unit.
-    -->
-    
-    <!-- First some useful eduPerson attributes that many sites might use. -->
-    
-    <Attribute name="urn:mace:dir:attribute-def:eduPersonPrincipalName" id="eppn">
-        <AttributeDecoder xsi:type="ScopedAttributeDecoder"/>
-    </Attribute>
-    <Attribute name="urn:oid:1.3.6.1.4.1.5923.1.1.1.6" id="eppn">
-        <AttributeDecoder xsi:type="ScopedAttributeDecoder"/>
-    </Attribute>
-    
-    <Attribute name="urn:mace:dir:attribute-def:eduPersonScopedAffiliation" id="affiliation">
-        <AttributeDecoder xsi:type="ScopedAttributeDecoder" caseSensitive="false"/>
-    </Attribute>
-    <Attribute name="urn:oid:1.3.6.1.4.1.5923.1.1.1.9" id="affiliation">
-        <AttributeDecoder xsi:type="ScopedAttributeDecoder" caseSensitive="false"/>
-    </Attribute>
-    
-    <Attribute name="urn:mace:dir:attribute-def:eduPersonAffiliation" id="unscoped-affiliation">
-        <AttributeDecoder xsi:type="StringAttributeDecoder" caseSensitive="false"/>
-    </Attribute>
-    <Attribute name="urn:oid:1.3.6.1.4.1.5923.1.1.1.1" id="unscoped-affiliation">
-        <AttributeDecoder xsi:type="StringAttributeDecoder" caseSensitive="false"/>
-    </Attribute>
-    
-    <Attribute name="urn:mace:dir:attribute-def:eduPersonEntitlement" id="entitlement"/>
-    <Attribute name="urn:oid:1.3.6.1.4.1.5923.1.1.1.7" id="entitlement"/>
-
-    <!-- A persistent id attribute that supports personalized anonymous access. -->
-    
-    <!-- First, the deprecated/incorrect version, decoded as a scoped string: -->
-    <Attribute name="urn:mace:dir:attribute-def:eduPersonTargetedID" id="targeted-id">
-        <AttributeDecoder xsi:type="ScopedAttributeDecoder"/>
-        <!-- <AttributeDecoder xsi:type="NameIDFromScopedAttributeDecoder" formatter="$NameQualifier!$SPNameQualifier!$Name" defaultQualifiers="true"/> -->
-    </Attribute>
-    
-    <!-- Second, an alternate decoder that will decode the incorrect form into the newer form. -->
-    <!--
-    <Attribute name="urn:mace:dir:attribute-def:eduPersonTargetedID" id="persistent-id">
-        <AttributeDecoder xsi:type="NameIDFromScopedAttributeDecoder" formatter="$NameQualifier!$SPNameQualifier!$Name" defaultQualifiers="true"/>
-    </Attribute>
-    -->
-    
-    <!-- Third, the new version (note the OID-style name): -->
-    <Attribute name="urn:oid:1.3.6.1.4.1.5923.1.1.1.10" id="persistent-id">
-        <AttributeDecoder xsi:type="NameIDAttributeDecoder" formatter="$NameQualifier!$SPNameQualifier!$Name" defaultQualifiers="true"/>
-    </Attribute>
-
-    <!-- Fourth, the SAML 2.0 NameID Format: -->
-    <Attribute name="urn:oasis:names:tc:SAML:2.0:nameid-format:persistent" id="persistent-id">
-        <AttributeDecoder xsi:type="NameIDAttributeDecoder" formatter="$NameQualifier!$SPNameQualifier!$Name" defaultQualifiers="true"/>
-    </Attribute>
-    
-    <!-- Some more eduPerson attributes, uncomment these to use them... -->
-    <Attribute name="urn:mace:dir:attribute-def:eduPersonPrimaryAffiliation" id="primary-affiliation">
-        <AttributeDecoder xsi:type="StringAttributeDecoder" caseSensitive="false"/>
-    </Attribute>
-    <Attribute name="urn:mace:dir:attribute-def:eduPersonNickname" id="nickname"/>
-    <Attribute name="urn:mace:dir:attribute-def:eduPersonPrimaryOrgUnitDN" id="primary-orgunit-dn"/>
-    <Attribute name="urn:mace:dir:attribute-def:eduPersonOrgUnitDN" id="orgunit-dn"/>
-    <Attribute name="urn:mace:dir:attribute-def:eduPersonOrgDN" id="org-dn"/>
-
-    <Attribute name="urn:oid:1.3.6.1.4.1.5923.1.1.1.5" id="primary-affiliation">
-        <AttributeDecoder xsi:type="StringAttributeDecoder" caseSensitive="false"/>
-    </Attribute>
-    <Attribute name="urn:oid:1.3.6.1.4.1.5923.1.1.1.2" id="nickname"/>
-    <Attribute name="urn:oid:1.3.6.1.4.1.5923.1.1.1.8" id="primary-orgunit-dn"/>
-    <Attribute name="urn:oid:1.3.6.1.4.1.5923.1.1.1.4" id="orgunit-dn"/>
-    <Attribute name="urn:oid:1.3.6.1.4.1.5923.1.1.1.3" id="org-dn"/>
-
-    <Attribute name="urn:oid:1.3.6.1.4.1.5923.1.1.1.11" id="assurance"/>
-    
-    <Attribute name="urn:oid:1.3.6.1.4.1.5923.1.5.1.1" id="member"/>
-    
-    <Attribute name="urn:oid:1.3.6.1.4.1.5923.1.6.1.1" id="eduCourseOffering"/>
-    <Attribute name="urn:oid:1.3.6.1.4.1.5923.1.6.1.2" id="eduCourseMember"/>
-
-    <!-- Examples of LDAP-based attributes, uncomment to use these... -->
-    <Attribute name="urn:mace:dir:attribute-def:cn" id="cn"/>
-    <Attribute name="urn:mace:dir:attribute-def:sn" id="sn"/>
-    <Attribute name="urn:mace:dir:attribute-def:givenName" id="givenName"/>
-    <Attribute name="urn:mace:dir:attribute-def:displayName" id="displayName"/>
-    <Attribute name="urn:mace:dir:attribute-def:mail" id="mail"/>
-    <Attribute name="urn:mace:dir:attribute-def:telephoneNumber" id="telephoneNumber"/>
-    <Attribute name="urn:mace:dir:attribute-def:title" id="title"/>
-    <Attribute name="urn:mace:dir:attribute-def:initials" id="initials"/>
-    <Attribute name="urn:mace:dir:attribute-def:description" id="description"/>
-    <Attribute name="urn:mace:dir:attribute-def:carLicense" id="carLicense"/>
-    <Attribute name="urn:mace:dir:attribute-def:departmentNumber" id="departmentNumber"/>
-    <Attribute name="urn:mace:dir:attribute-def:employeeNumber" id="employeeNumber"/>
-    <Attribute name="urn:mace:dir:attribute-def:employeeType" id="employeeType"/>
-    <Attribute name="urn:mace:dir:attribute-def:preferredLanguage" id="preferredLanguage"/>
-    <Attribute name="urn:mace:dir:attribute-def:manager" id="manager"/>
-    <Attribute name="urn:mace:dir:attribute-def:seeAlso" id="seeAlso"/>
-    <Attribute name="urn:mace:dir:attribute-def:facsimileTelephoneNumber" id="facsimileTelephoneNumber"/>
-    <Attribute name="urn:mace:dir:attribute-def:street" id="street"/>
-    <Attribute name="urn:mace:dir:attribute-def:postOfficeBox" id="postOfficeBox"/>
-    <Attribute name="urn:mace:dir:attribute-def:postalCode" id="postalCode"/>
-    <Attribute name="urn:mace:dir:attribute-def:st" id="st"/>
-    <Attribute name="urn:mace:dir:attribute-def:l" id="l"/>
-    <Attribute name="urn:mace:dir:attribute-def:o" id="o"/>
-    <Attribute name="urn:mace:dir:attribute-def:ou" id="ou"/>
-    <Attribute name="urn:mace:dir:attribute-def:businessCategory" id="businessCategory"/>
-    <Attribute name="urn:mace:dir:attribute-def:physicalDeliveryOfficeName" id="physicalDeliveryOfficeName"/>
-
-    <Attribute name="urn:oid:0.9.2342.19200300.100.1.1" id="uid"/>
-    <Attribute name="urn:oid:2.5.4.3" id="cn"/>
-    <Attribute name="urn:oid:2.5.4.4" id="sn"/>
-    <Attribute name="urn:oid:2.5.4.42" id="givenName"/>
-    <Attribute name="urn:oid:2.16.840.1.113730.3.1.241" id="displayName"/>
-    <Attribute name="urn:oid:0.9.2342.19200300.100.1.3" id="mail"/>
-    <Attribute name="urn:oid:2.5.4.20" id="telephoneNumber"/>
-    <Attribute name="urn:oid:2.5.4.12" id="title"/>
-    <Attribute name="urn:oid:2.5.4.43" id="initials"/>
-    <Attribute name="urn:oid:2.5.4.13" id="description"/>
-    <Attribute name="urn:oid:2.16.840.1.113730.3.1.1" id="carLicense"/>
-    <Attribute name="urn:oid:2.16.840.1.113730.3.1.2" id="departmentNumber"/>
-    <Attribute name="urn:oid:2.16.840.1.113730.3.1.3" id="employeeNumber"/>
-    <Attribute name="urn:oid:2.16.840.1.113730.3.1.4" id="employeeType"/>
-    <Attribute name="urn:oid:2.16.840.1.113730.3.1.39" id="preferredLanguage"/>
-    <Attribute name="urn:oid:0.9.2342.19200300.100.1.10" id="manager"/>
-    <Attribute name="urn:oid:2.5.4.34" id="seeAlso"/>
-    <Attribute name="urn:oid:2.5.4.23" id="facsimileTelephoneNumber"/>
-    <Attribute name="urn:oid:2.5.4.9" id="street"/>
-    <Attribute name="urn:oid:2.5.4.18" id="postOfficeBox"/>
-    <Attribute name="urn:oid:2.5.4.17" id="postalCode"/>
-    <Attribute name="urn:oid:2.5.4.8" id="st"/>
-    <Attribute name="urn:oid:2.5.4.7" id="l"/>
-    <Attribute name="urn:oid:2.5.4.10" id="o"/>
-    <Attribute name="urn:oid:2.5.4.11" id="ou"/>
-    <Attribute name="urn:oid:2.5.4.15" id="businessCategory"/>
-    <Attribute name="urn:oid:2.5.4.19" id="physicalDeliveryOfficeName"/>
-
-</Attributes>
diff --git a/conf/vagrant/etc/shibboleth/dataverse-idp-metadata.xml b/conf/vagrant/etc/shibboleth/dataverse-idp-metadata.xml
deleted file mode 100644
index 67225b5e670..00000000000
--- a/conf/vagrant/etc/shibboleth/dataverse-idp-metadata.xml
+++ /dev/null
@@ -1,298 +0,0 @@
-<EntitiesDescriptor Name="urn:mace:shibboleth:testshib:two"
-    xmlns="urn:oasis:names:tc:SAML:2.0:metadata" xmlns:ds="http://www.w3.org/2000/09/xmldsig#"
-    xmlns:mdalg="urn:oasis:names:tc:SAML:metadata:algsupport" xmlns:mdui="urn:oasis:names:tc:SAML:metadata:ui"
-    xmlns:shibmd="urn:mace:shibboleth:metadata:1.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
-
-    <!-- This file contains the metadata for the testing IdP and SP
-     that are operated by TestShib as a service for testing new
-     Shibboleth and SAML providers. -->
-
-    <EntityDescriptor entityID="https://idp.testshib.org/idp/shibboleth">
-        
-        <Extensions>
-            <mdalg:DigestMethod Algorithm="http://www.w3.org/2001/04/xmlenc#sha512" />
-            <mdalg:DigestMethod Algorithm="http://www.w3.org/2001/04/xmldsig-more#sha384" />
-            <mdalg:DigestMethod Algorithm="http://www.w3.org/2001/04/xmlenc#sha256" />
-            <mdalg:DigestMethod Algorithm="http://www.w3.org/2000/09/xmldsig#sha1" />
-            <mdalg:SigningMethod Algorithm="http://www.w3.org/2001/04/xmldsig-more#rsa-sha512" />
-            <mdalg:SigningMethod Algorithm="http://www.w3.org/2001/04/xmldsig-more#rsa-sha384" />
-            <mdalg:SigningMethod Algorithm="http://www.w3.org/2001/04/xmldsig-more#rsa-sha256" />
-            <mdalg:SigningMethod Algorithm="http://www.w3.org/2000/09/xmldsig#rsa-sha1" />
-        </Extensions>
-
-        <IDPSSODescriptor
-            protocolSupportEnumeration="urn:oasis:names:tc:SAML:1.1:protocol urn:mace:shibboleth:1.0 urn:oasis:names:tc:SAML:2.0:protocol">
-            <Extensions>
-                <shibmd:Scope regexp="false">testshib.org</shibmd:Scope>
-                <mdui:UIInfo>
-                    <mdui:DisplayName xml:lang="en">TestShib Test IdP</mdui:DisplayName>
-                    <mdui:Description xml:lang="en">TestShib IdP. Use this as a source of attributes
-                        for your test SP.</mdui:Description>
-                    <mdui:Logo height="88" width="75"
-                        >https://www.testshib.org/images/testshib-transp.png</mdui:Logo>
-                </mdui:UIInfo>
-
-            </Extensions>
-            <KeyDescriptor>
-                <ds:KeyInfo>
-                    <ds:X509Data>
-                        <ds:X509Certificate>
-                            MIIEDjCCAvagAwIBAgIBADANBgkqhkiG9w0BAQUFADBnMQswCQYDVQQGEwJVUzEV
-                            MBMGA1UECBMMUGVubnN5bHZhbmlhMRMwEQYDVQQHEwpQaXR0c2J1cmdoMREwDwYD
-                            VQQKEwhUZXN0U2hpYjEZMBcGA1UEAxMQaWRwLnRlc3RzaGliLm9yZzAeFw0wNjA4
-                            MzAyMTEyMjVaFw0xNjA4MjcyMTEyMjVaMGcxCzAJBgNVBAYTAlVTMRUwEwYDVQQI
-                            EwxQZW5uc3lsdmFuaWExEzARBgNVBAcTClBpdHRzYnVyZ2gxETAPBgNVBAoTCFRl
-                            c3RTaGliMRkwFwYDVQQDExBpZHAudGVzdHNoaWIub3JnMIIBIjANBgkqhkiG9w0B
-                            AQEFAAOCAQ8AMIIBCgKCAQEArYkCGuTmJp9eAOSGHwRJo1SNatB5ZOKqDM9ysg7C
-                            yVTDClcpu93gSP10nH4gkCZOlnESNgttg0r+MqL8tfJC6ybddEFB3YBo8PZajKSe
-                            3OQ01Ow3yT4I+Wdg1tsTpSge9gEz7SrC07EkYmHuPtd71CHiUaCWDv+xVfUQX0aT
-                            NPFmDixzUjoYzbGDrtAyCqA8f9CN2txIfJnpHE6q6CmKcoLADS4UrNPlhHSzd614
-                            kR/JYiks0K4kbRqCQF0Dv0P5Di+rEfefC6glV8ysC8dB5/9nb0yh/ojRuJGmgMWH
-                            gWk6h0ihjihqiu4jACovUZ7vVOCgSE5Ipn7OIwqd93zp2wIDAQABo4HEMIHBMB0G
-                            A1UdDgQWBBSsBQ869nh83KqZr5jArr4/7b+QazCBkQYDVR0jBIGJMIGGgBSsBQ86
-                            9nh83KqZr5jArr4/7b+Qa6FrpGkwZzELMAkGA1UEBhMCVVMxFTATBgNVBAgTDFBl
-                            bm5zeWx2YW5pYTETMBEGA1UEBxMKUGl0dHNidXJnaDERMA8GA1UEChMIVGVzdFNo
-                            aWIxGTAXBgNVBAMTEGlkcC50ZXN0c2hpYi5vcmeCAQAwDAYDVR0TBAUwAwEB/zAN
-                            BgkqhkiG9w0BAQUFAAOCAQEAjR29PhrCbk8qLN5MFfSVk98t3CT9jHZoYxd8QMRL
-                            I4j7iYQxXiGJTT1FXs1nd4Rha9un+LqTfeMMYqISdDDI6tv8iNpkOAvZZUosVkUo
-                            93pv1T0RPz35hcHHYq2yee59HJOco2bFlcsH8JBXRSRrJ3Q7Eut+z9uo80JdGNJ4
-                            /SJy5UorZ8KazGj16lfJhOBXldgrhppQBb0Nq6HKHguqmwRfJ+WkxemZXzhediAj
-                            Geka8nz8JjwxpUjAiSWYKLtJhGEaTqCYxCCX2Dw+dOTqUzHOZ7WKv4JXPK5G/Uhr
-                            8K/qhmFT2nIQi538n6rVYLeWj8Bbnl+ev0peYzxFyF5sQA==
-                        </ds:X509Certificate>
-                    </ds:X509Data>
-                </ds:KeyInfo>
-                <EncryptionMethod Algorithm="http://www.w3.org/2001/04/xmlenc#aes256-cbc"/>
-                <EncryptionMethod Algorithm="http://www.w3.org/2001/04/xmlenc#aes192-cbc" />
-                <EncryptionMethod Algorithm="http://www.w3.org/2001/04/xmlenc#aes128-cbc"/>
-                <EncryptionMethod Algorithm="http://www.w3.org/2001/04/xmlenc#tripledes-cbc"/>
-                <EncryptionMethod Algorithm="http://www.w3.org/2001/04/xmlenc#rsa-oaep-mgf1p"/>
-                <EncryptionMethod Algorithm="http://www.w3.org/2001/04/xmlenc#rsa-1_5"/>
-            </KeyDescriptor>
-
-            <ArtifactResolutionService Binding="urn:oasis:names:tc:SAML:1.0:bindings:SOAP-binding"
-                Location="https://idp.testshib.org:8443/idp/profile/SAML1/SOAP/ArtifactResolution"
-                index="1"/>
-            <ArtifactResolutionService Binding="urn:oasis:names:tc:SAML:2.0:bindings:SOAP"
-                Location="https://idp.testshib.org:8443/idp/profile/SAML2/SOAP/ArtifactResolution"
-                index="2"/>
-
-            <NameIDFormat>urn:mace:shibboleth:1.0:nameIdentifier</NameIDFormat>
-            <NameIDFormat>urn:oasis:names:tc:SAML:2.0:nameid-format:transient</NameIDFormat>
-            
-            <SingleSignOnService Binding="urn:mace:shibboleth:1.0:profiles:AuthnRequest"
-                Location="https://idp.testshib.org/idp/profile/Shibboleth/SSO"/>
-            <SingleSignOnService Binding="urn:oasis:names:tc:SAML:2.0:bindings:HTTP-POST"
-                Location="https://idp.testshib.org/idp/profile/SAML2/POST/SSO"/>
-            <SingleSignOnService Binding="urn:oasis:names:tc:SAML:2.0:bindings:HTTP-Redirect"
-                Location="https://idp.testshib.org/idp/profile/SAML2/Redirect/SSO"/>
-            <SingleSignOnService Binding="urn:oasis:names:tc:SAML:2.0:bindings:SOAP" 
-                Location="https://idp.testshib.org/idp/profile/SAML2/SOAP/ECP"/>
-
-        </IDPSSODescriptor>
-
-
-        <AttributeAuthorityDescriptor
-            protocolSupportEnumeration="urn:oasis:names:tc:SAML:1.1:protocol urn:oasis:names:tc:SAML:2.0:protocol">
-
-            <KeyDescriptor>
-                <ds:KeyInfo>
-                    <ds:X509Data>
-                        <ds:X509Certificate>
-                            MIIEDjCCAvagAwIBAgIBADANBgkqhkiG9w0BAQUFADBnMQswCQYDVQQGEwJVUzEV
-                            MBMGA1UECBMMUGVubnN5bHZhbmlhMRMwEQYDVQQHEwpQaXR0c2J1cmdoMREwDwYD
-                            VQQKEwhUZXN0U2hpYjEZMBcGA1UEAxMQaWRwLnRlc3RzaGliLm9yZzAeFw0wNjA4
-                            MzAyMTEyMjVaFw0xNjA4MjcyMTEyMjVaMGcxCzAJBgNVBAYTAlVTMRUwEwYDVQQI
-                            EwxQZW5uc3lsdmFuaWExEzARBgNVBAcTClBpdHRzYnVyZ2gxETAPBgNVBAoTCFRl
-                            c3RTaGliMRkwFwYDVQQDExBpZHAudGVzdHNoaWIub3JnMIIBIjANBgkqhkiG9w0B
-                            AQEFAAOCAQ8AMIIBCgKCAQEArYkCGuTmJp9eAOSGHwRJo1SNatB5ZOKqDM9ysg7C
-                            yVTDClcpu93gSP10nH4gkCZOlnESNgttg0r+MqL8tfJC6ybddEFB3YBo8PZajKSe
-                            3OQ01Ow3yT4I+Wdg1tsTpSge9gEz7SrC07EkYmHuPtd71CHiUaCWDv+xVfUQX0aT
-                            NPFmDixzUjoYzbGDrtAyCqA8f9CN2txIfJnpHE6q6CmKcoLADS4UrNPlhHSzd614
-                            kR/JYiks0K4kbRqCQF0Dv0P5Di+rEfefC6glV8ysC8dB5/9nb0yh/ojRuJGmgMWH
-                            gWk6h0ihjihqiu4jACovUZ7vVOCgSE5Ipn7OIwqd93zp2wIDAQABo4HEMIHBMB0G
-                            A1UdDgQWBBSsBQ869nh83KqZr5jArr4/7b+QazCBkQYDVR0jBIGJMIGGgBSsBQ86
-                            9nh83KqZr5jArr4/7b+Qa6FrpGkwZzELMAkGA1UEBhMCVVMxFTATBgNVBAgTDFBl
-                            bm5zeWx2YW5pYTETMBEGA1UEBxMKUGl0dHNidXJnaDERMA8GA1UEChMIVGVzdFNo
-                            aWIxGTAXBgNVBAMTEGlkcC50ZXN0c2hpYi5vcmeCAQAwDAYDVR0TBAUwAwEB/zAN
-                            BgkqhkiG9w0BAQUFAAOCAQEAjR29PhrCbk8qLN5MFfSVk98t3CT9jHZoYxd8QMRL
-                            I4j7iYQxXiGJTT1FXs1nd4Rha9un+LqTfeMMYqISdDDI6tv8iNpkOAvZZUosVkUo
-                            93pv1T0RPz35hcHHYq2yee59HJOco2bFlcsH8JBXRSRrJ3Q7Eut+z9uo80JdGNJ4
-                            /SJy5UorZ8KazGj16lfJhOBXldgrhppQBb0Nq6HKHguqmwRfJ+WkxemZXzhediAj
-                            Geka8nz8JjwxpUjAiSWYKLtJhGEaTqCYxCCX2Dw+dOTqUzHOZ7WKv4JXPK5G/Uhr
-                            8K/qhmFT2nIQi538n6rVYLeWj8Bbnl+ev0peYzxFyF5sQA==
-                        </ds:X509Certificate>
-                    </ds:X509Data>
-                </ds:KeyInfo>
-                <EncryptionMethod Algorithm="http://www.w3.org/2001/04/xmlenc#aes256-cbc"/>
-                <EncryptionMethod Algorithm="http://www.w3.org/2001/04/xmlenc#aes192-cbc" />
-                <EncryptionMethod Algorithm="http://www.w3.org/2001/04/xmlenc#aes128-cbc"/>
-                <EncryptionMethod Algorithm="http://www.w3.org/2001/04/xmlenc#tripledes-cbc"/>
-                <EncryptionMethod Algorithm="http://www.w3.org/2001/04/xmlenc#rsa-oaep-mgf1p"/>
-                <EncryptionMethod Algorithm="http://www.w3.org/2001/04/xmlenc#rsa-1_5"/>
-            </KeyDescriptor>
-
-
-            <AttributeService Binding="urn:oasis:names:tc:SAML:1.0:bindings:SOAP-binding"
-                Location="https://idp.testshib.org:8443/idp/profile/SAML1/SOAP/AttributeQuery"/>
-            <AttributeService Binding="urn:oasis:names:tc:SAML:2.0:bindings:SOAP"
-                Location="https://idp.testshib.org:8443/idp/profile/SAML2/SOAP/AttributeQuery"/>
-
-            <NameIDFormat>urn:mace:shibboleth:1.0:nameIdentifier</NameIDFormat>
-            <NameIDFormat>urn:oasis:names:tc:SAML:2.0:nameid-format:transient</NameIDFormat>
-
-        </AttributeAuthorityDescriptor>
-
-        <Organization>
-            <OrganizationName xml:lang="en">TestShib Two Identity Provider</OrganizationName>
-            <OrganizationDisplayName xml:lang="en">TestShib Two</OrganizationDisplayName>
-            <OrganizationURL xml:lang="en">http://www.testshib.org/testshib-two/</OrganizationURL>
-        </Organization>
-        <ContactPerson contactType="technical">
-            <GivenName>Nate</GivenName>
-            <SurName>Klingenstein</SurName>
-            <EmailAddress>ndk@internet2.edu</EmailAddress>
-        </ContactPerson>
-    </EntityDescriptor>
-
-    <EntityDescriptor entityID="https://sp.testshib.org/shibboleth-sp">
-
-        <Extensions>
-            <mdalg:DigestMethod Algorithm="http://www.w3.org/2001/04/xmlenc#sha512" />
-            <mdalg:DigestMethod Algorithm="http://www.w3.org/2001/04/xmldsig-more#sha384" />
-            <mdalg:DigestMethod Algorithm="http://www.w3.org/2001/04/xmlenc#sha256" />
-            <mdalg:DigestMethod Algorithm="http://www.w3.org/2000/09/xmldsig#sha1" />
-            <mdalg:SigningMethod Algorithm="http://www.w3.org/2001/04/xmldsig-more#rsa-sha512" />
-            <mdalg:SigningMethod Algorithm="http://www.w3.org/2001/04/xmldsig-more#rsa-sha384" />
-            <mdalg:SigningMethod Algorithm="http://www.w3.org/2001/04/xmldsig-more#rsa-sha256" />
-            <mdalg:SigningMethod Algorithm="http://www.w3.org/2000/09/xmldsig#rsa-sha1" />
-        </Extensions>
-        
-        <!-- An SP supporting SAML 1 and 2 contains this element with protocol support as shown. -->
-        <SPSSODescriptor
-            protocolSupportEnumeration="urn:oasis:names:tc:SAML:2.0:protocol urn:oasis:names:tc:SAML:1.1:protocol http://schemas.xmlsoap.org/ws/2003/07/secext">
-
-            <Extensions>
-                <!-- Extension to permit the SP to receive IdP discovery responses. -->
-                <idpdisc:DiscoveryResponse
-                    xmlns:idpdisc="urn:oasis:names:tc:SAML:profiles:SSO:idp-discovery-protocol"
-                    index="1" Binding="urn:oasis:names:tc:SAML:profiles:SSO:idp-discovery-protocol"
-                    Location="https://sp.testshib.org/Shibboleth.sso/DS"/>
-                
-                <mdui:UIInfo>
-                    <mdui:DisplayName xml:lang="en">TestShib Test SP</mdui:DisplayName>
-                    <mdui:Description xml:lang="en">TestShib SP. Log into this to test your machine.
-                        Once logged in check that all attributes that you expected have been
-                        released.</mdui:Description>
-                    <mdui:Logo height="88" width="75">https://www.testshib.org/images/testshib-transp.png</mdui:Logo>
-                </mdui:UIInfo>
-            </Extensions>
-
-            <KeyDescriptor>
-                <ds:KeyInfo>
-                    <ds:X509Data>
-                        <ds:X509Certificate>
-                            MIIEPjCCAyagAwIBAgIBADANBgkqhkiG9w0BAQUFADB3MQswCQYDVQQGEwJVUzEV
-                            MBMGA1UECBMMUGVubnN5bHZhbmlhMRMwEQYDVQQHEwpQaXR0c2J1cmdoMSIwIAYD
-                            VQQKExlUZXN0U2hpYiBTZXJ2aWNlIFByb3ZpZGVyMRgwFgYDVQQDEw9zcC50ZXN0
-                            c2hpYi5vcmcwHhcNMDYwODMwMjEyNDM5WhcNMTYwODI3MjEyNDM5WjB3MQswCQYD
-                            VQQGEwJVUzEVMBMGA1UECBMMUGVubnN5bHZhbmlhMRMwEQYDVQQHEwpQaXR0c2J1
-                            cmdoMSIwIAYDVQQKExlUZXN0U2hpYiBTZXJ2aWNlIFByb3ZpZGVyMRgwFgYDVQQD
-                            Ew9zcC50ZXN0c2hpYi5vcmcwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIB
-                            AQDJyR6ZP6MXkQ9z6RRziT0AuCabDd3x1m7nLO9ZRPbr0v1LsU+nnC363jO8nGEq
-                            sqkgiZ/bSsO5lvjEt4ehff57ERio2Qk9cYw8XCgmYccVXKH9M+QVO1MQwErNobWb
-                            AjiVkuhWcwLWQwTDBowfKXI87SA7KR7sFUymNx5z1aoRvk3GM++tiPY6u4shy8c7
-                            vpWbVfisfTfvef/y+galxjPUQYHmegu7vCbjYP3On0V7/Ivzr+r2aPhp8egxt00Q
-                            XpilNai12LBYV3Nv/lMsUzBeB7+CdXRVjZOHGuQ8mGqEbsj8MBXvcxIKbcpeK5Zi
-                            JCVXPfarzuriM1G5y5QkKW+LAgMBAAGjgdQwgdEwHQYDVR0OBBYEFKB6wPDxwYrY
-                            StNjU5P4b4AjBVQVMIGhBgNVHSMEgZkwgZaAFKB6wPDxwYrYStNjU5P4b4AjBVQV
-                            oXukeTB3MQswCQYDVQQGEwJVUzEVMBMGA1UECBMMUGVubnN5bHZhbmlhMRMwEQYD
-                            VQQHEwpQaXR0c2J1cmdoMSIwIAYDVQQKExlUZXN0U2hpYiBTZXJ2aWNlIFByb3Zp
-                            ZGVyMRgwFgYDVQQDEw9zcC50ZXN0c2hpYi5vcmeCAQAwDAYDVR0TBAUwAwEB/zAN
-                            BgkqhkiG9w0BAQUFAAOCAQEAc06Kgt7ZP6g2TIZgMbFxg6vKwvDL0+2dzF11Onpl
-                            5sbtkPaNIcj24lQ4vajCrrGKdzHXo9m54BzrdRJ7xDYtw0dbu37l1IZVmiZr12eE
-                            Iay/5YMU+aWP1z70h867ZQ7/7Y4HW345rdiS6EW663oH732wSYNt9kr7/0Uer3KD
-                            9CuPuOidBacospDaFyfsaJruE99Kd6Eu/w5KLAGG+m0iqENCziDGzVA47TngKz2v
-                            PVA+aokoOyoz3b53qeti77ijatSEoKjxheBWpO+eoJeGq/e49Um3M2ogIX/JAlMa
-                            Inh+vYSYngQB2sx9LGkR9KHaMKNIGCDehk93Xla4pWJx1w== 
-                        </ds:X509Certificate>
-                    </ds:X509Data>
-                </ds:KeyInfo>
-                <EncryptionMethod Algorithm="http://www.w3.org/2001/04/xmlenc#aes256-cbc"/>
-                <EncryptionMethod Algorithm="http://www.w3.org/2001/04/xmlenc#aes192-cbc" />
-                <EncryptionMethod Algorithm="http://www.w3.org/2001/04/xmlenc#aes128-cbc"/>
-                <EncryptionMethod Algorithm="http://www.w3.org/2001/04/xmlenc#tripledes-cbc"/>
-                <EncryptionMethod Algorithm="http://www.w3.org/2001/04/xmlenc#rsa-oaep-mgf1p"/>
-                <EncryptionMethod Algorithm="http://www.w3.org/2001/04/xmlenc#rsa-1_5"/>
-            </KeyDescriptor>
-
-            <!-- This tells IdPs that Single Logout is supported and where/how to request it. -->
-
-            <SingleLogoutService Location="https://sp.testshib.org/Shibboleth.sso/SLO/SOAP"
-                Binding="urn:oasis:names:tc:SAML:2.0:bindings:SOAP"/>
-            <SingleLogoutService Location="https://sp.testshib.org/Shibboleth.sso/SLO/Redirect"
-                Binding="urn:oasis:names:tc:SAML:2.0:bindings:HTTP-Redirect"/>
-            <SingleLogoutService Location="https://sp.testshib.org/Shibboleth.sso/SLO/POST"
-                Binding="urn:oasis:names:tc:SAML:2.0:bindings:HTTP-POST"/>
-            <SingleLogoutService Location="https://sp.testshib.org/Shibboleth.sso/SLO/Artifact"
-                Binding="urn:oasis:names:tc:SAML:2.0:bindings:HTTP-Artifact"/>
-
-
-            <!-- This tells IdPs that you only need transient identifiers. -->
-            <NameIDFormat>urn:oasis:names:tc:SAML:2.0:nameid-format:transient</NameIDFormat>
-            <NameIDFormat>urn:mace:shibboleth:1.0:nameIdentifier</NameIDFormat>
-
-            <!--
-		This tells IdPs where and how to send authentication assertions. Mostly
-		the SP will tell the IdP what location to use in its request, but this
-		is how the IdP validates the location and also figures out which
-		SAML version/binding to use.
-		-->
-
-            <AssertionConsumerService index="1" isDefault="true"
-                Binding="urn:oasis:names:tc:SAML:2.0:bindings:HTTP-POST"
-                Location="https://sp.testshib.org/Shibboleth.sso/SAML2/POST"/>
-            <AssertionConsumerService index="2"
-                Binding="urn:oasis:names:tc:SAML:2.0:bindings:HTTP-POST-SimpleSign"
-                Location="https://sp.testshib.org/Shibboleth.sso/SAML2/POST-SimpleSign"/>
-            <AssertionConsumerService index="3"
-                Binding="urn:oasis:names:tc:SAML:2.0:bindings:HTTP-Artifact"
-                Location="https://sp.testshib.org/Shibboleth.sso/SAML2/Artifact"/>
-            <AssertionConsumerService index="4"
-                Binding="urn:oasis:names:tc:SAML:1.0:profiles:browser-post"
-                Location="https://sp.testshib.org/Shibboleth.sso/SAML/POST"/>
-            <AssertionConsumerService index="5"
-                Binding="urn:oasis:names:tc:SAML:1.0:profiles:artifact-01"
-                Location="https://sp.testshib.org/Shibboleth.sso/SAML/Artifact"/>
-            <AssertionConsumerService index="6"
-                Binding="http://schemas.xmlsoap.org/ws/2003/07/secext"
-                Location="https://sp.testshib.org/Shibboleth.sso/ADFS"/>
-
-            <!-- A couple additional assertion consumers for the registration webapp. -->
-
-            <AssertionConsumerService index="7"
-                Binding="urn:oasis:names:tc:SAML:2.0:bindings:HTTP-POST"
-                Location="https://www.testshib.org/Shibboleth.sso/SAML2/POST"/>
-            <AssertionConsumerService index="8"
-                Binding="urn:oasis:names:tc:SAML:1.0:profiles:browser-post"
-                Location="https://www.testshib.org/Shibboleth.sso/SAML/POST"/>
-
-        </SPSSODescriptor>
-
-        <!-- This is just information about the entity in human terms. -->
-        <Organization>
-            <OrganizationName xml:lang="en">TestShib Two Service Provider</OrganizationName>
-            <OrganizationDisplayName xml:lang="en">TestShib Two</OrganizationDisplayName>
-            <OrganizationURL xml:lang="en">http://www.testshib.org/testshib-two/</OrganizationURL>
-        </Organization>
-        <ContactPerson contactType="technical">
-            <GivenName>Nate</GivenName>
-            <SurName>Klingenstein</SurName>
-            <EmailAddress>ndk@internet2.edu</EmailAddress>
-        </ContactPerson>
-
-    </EntityDescriptor>
-
-
-</EntitiesDescriptor>
-
diff --git a/conf/vagrant/etc/shibboleth/shibboleth2.xml b/conf/vagrant/etc/shibboleth/shibboleth2.xml
deleted file mode 100644
index 946e73bdf6a..00000000000
--- a/conf/vagrant/etc/shibboleth/shibboleth2.xml
+++ /dev/null
@@ -1,85 +0,0 @@
-<!--
-This is an example shibboleth2.xml generated for you by TestShib.  It's reduced and recommended
-specifically for testing.  You don't need to change anything, but you may want to explore the file
-to learn about how your SP works.  Uncomment attributes in your attribute-map.xml file to test them.
-
-If you want to test advanced functionality, start from the distribution shibboleth2.xml and add the
-MetadataProvider, the right entityID, and a properly configured SSO element.  More information:
-
-https://wiki.shibboleth.net/confluence/display/SHIB2/NativeSPConfiguration
--->
-
-<SPConfig xmlns="urn:mace:shibboleth:2.0:native:sp:config" xmlns:md="urn:oasis:names:tc:SAML:2.0:metadata"
-    clockSkew="1800">
-
-    <!-- The entityID is the name TestShib made for your SP. -->
-    <ApplicationDefaults entityID="https://pdurbin.pagekite.me/shibboleth"
-        REMOTE_USER="eppn" attributePrefix="AJP_">
-
-        <!-- You should use secure cookies if at all possible.  See cookieProps in this Wiki article. -->
-        <!-- https://wiki.shibboleth.net/confluence/display/SHIB2/NativeSPSessions -->
-        <Sessions lifetime="28800" timeout="3600" checkAddress="false" relayState="ss:mem" handlerSSL="false">
-
-            <!-- Triggers a login request directly to the TestShib IdP. -->
-            <!-- https://wiki.shibboleth.net/confluence/display/SHIB2/NativeSPServiceSSO -->
-            <!-- A single explicit entityId could be used for Dataverse installation that only cares about one IdP. -->
-            <!-- <SSO entityID="https://idp.testshib.org/idp/shibboleth">SAML2 SAML1</SSO> -->
-
-            <!-- not used but maybe helpful for troubleshooting -->
-            <!--<SSO discoveryProtocol="SAMLDS" discoveryURL="https://pdurbin.pagekite.me/shibboleth-ds/index.html">SAML2 SAML1</SSO>-->
-            <!-- picker is displayed as an iframe in this page -->
-            <SSO discoveryProtocol="SAMLDS" discoveryURL="http://pdurbin.pagekite.me/loginpage.xhtml">SAML2 SAML1</SSO>
-<!--
--->
-            <!-- SAML and local-only logout. -->
-            <!-- https://wiki.shibboleth.net/confluence/display/SHIB2/NativeSPServiceLogout -->
-            <Logout>SAML2 Local</Logout>
-
-            <!--
-                Handlers allow you to interact with the SP and gather more information.  Try them out!
-                Attribute values received by the SP through SAML will be visible at:
-                http://pdurbin.pagekite.me/Shibboleth.sso/Session
-            -->
-
-            <!-- Extension service that generates "approximate" metadata based on SP configuration. -->
-            <Handler type="MetadataGenerator" Location="/Metadata" signing="false"/>
-
-            <!-- Status reporting service. -->
-            <Handler type="Status" Location="/Status" acl="127.0.0.1"/>
-
-            <!-- Session diagnostic service. -->
-            <Handler type="Session" Location="/Session" showAttributeValues="true"/>
-
-            <!-- JSON feed of discovery information. -->
-            <Handler type="DiscoveryFeed" Location="/DiscoFeed"/>
-
-        </Sessions>
-
-        <!-- Error pages to display to yourself if something goes horribly wrong. -->
-        <Errors supportContact="root@localhost" logoLocation="/shibboleth-sp/logo.jpg" 
-                styleSheet="/shibboleth-sp/main.css"/>
-
-        <!-- Loads and trusts a metadata file that describes only the Testshib IdP and how to communicate with it. -->
-        <!-- For Dataverse we hard code a local file instead of using a URI -->
-        <!-- <MetadataProvider type="XML" uri="http://www.testshib.org/metadata/testshib-providers.xml" backingFilePath="testshib-two-idp-metadata.xml" reloadInterval="180000" /> -->
-
-        <!-- IdPs we want allow go in /etc/shibboleth/dataverse-idp-metadata.xml -->
-        <MetadataProvider type="XML" file="dataverse-idp-metadata.xml" backingFilePath="local-idp-metadata.xml" legacyOrgNames="true" reloadInterval="7200"/>
-
-        <!-- Attribute and trust options you shouldn't need to change. -->
-        <AttributeExtractor type="XML" validate="true" path="attribute-map.xml"/>
-        <AttributeResolver type="Query" subjectMatch="true"/>
-        <AttributeFilter type="XML" validate="true" path="attribute-policy.xml"/>
-
-        <!-- Your SP generated these credentials.  They're used to talk to IdP's. -->
-        <CredentialResolver type="File" key="sp-key.pem" certificate="sp-cert.pem"/>
-
-    </ApplicationDefaults>
-    
-    <!-- Security policies you shouldn't change unless you know what you're doing. -->
-    <SecurityPolicyProvider type="XML" validate="true" path="security-policy.xml"/>
-
-    <!-- Low-level configuration about protocols and bindings available for use. -->
-    <ProtocolProvider type="XML" validate="true" reloadChanges="false" path="protocols.xml"/>
-
-</SPConfig>
diff --git a/conf/vagrant/etc/yum.repos.d/epel-apache-maven.repo b/conf/vagrant/etc/yum.repos.d/epel-apache-maven.repo
deleted file mode 100644
index 1e0f8200040..00000000000
--- a/conf/vagrant/etc/yum.repos.d/epel-apache-maven.repo
+++ /dev/null
@@ -1,15 +0,0 @@
-# Place this file in your /etc/yum.repos.d/ directory
-
-[epel-apache-maven]
-name=maven from apache foundation.
-baseurl=http://repos.fedorapeople.org/repos/dchen/apache-maven/epel-$releasever/$basearch/
-enabled=1
-skip_if_unavailable=1
-gpgcheck=0
-
-[epel-apache-maven-source]
-name=maven from apache foundation. - Source
-baseurl=http://repos.fedorapeople.org/repos/dchen/apache-maven/epel-$releasever/SRPMS
-enabled=0
-skip_if_unavailable=1
-gpgcheck=0
diff --git a/conf/vagrant/etc/yum.repos.d/shibboleth.repo b/conf/vagrant/etc/yum.repos.d/shibboleth.repo
deleted file mode 100644
index adf42185d8a..00000000000
--- a/conf/vagrant/etc/yum.repos.d/shibboleth.repo
+++ /dev/null
@@ -1,9 +0,0 @@
-[shibboleth]
-name=Shibboleth (rockylinux8)
-# Please report any problems to https://shibboleth.atlassian.net/jira
-type=rpm-md
-mirrorlist=https://shibboleth.net/cgi-bin/mirrorlist.cgi/rockylinux8
-gpgcheck=1
-gpgkey=https://shibboleth.net/downloads/service-provider/RPMS/repomd.xml.key
-        https://shibboleth.net/downloads/service-provider/RPMS/cantor.repomd.xml.key
-enabled=1
diff --git a/conf/vagrant/var/lib/pgsql/data/pg_hba.conf b/conf/vagrant/var/lib/pgsql/data/pg_hba.conf
deleted file mode 100644
index e3244686066..00000000000
--- a/conf/vagrant/var/lib/pgsql/data/pg_hba.conf
+++ /dev/null
@@ -1,74 +0,0 @@
-# PostgreSQL Client Authentication Configuration File
-# ===================================================
-#
-# Refer to the "Client Authentication" section in the
-# PostgreSQL documentation for a complete description
-# of this file.  A short synopsis follows.
-#
-# This file controls: which hosts are allowed to connect, how clients
-# are authenticated, which PostgreSQL user names they can use, which
-# databases they can access.  Records take one of these forms:
-#
-# local      DATABASE  USER  METHOD  [OPTIONS]
-# host       DATABASE  USER  CIDR-ADDRESS  METHOD  [OPTIONS]
-# hostssl    DATABASE  USER  CIDR-ADDRESS  METHOD  [OPTIONS]
-# hostnossl  DATABASE  USER  CIDR-ADDRESS  METHOD  [OPTIONS]
-#
-# (The uppercase items must be replaced by actual values.)
-#
-# The first field is the connection type: "local" is a Unix-domain socket,
-# "host" is either a plain or SSL-encrypted TCP/IP socket, "hostssl" is an
-# SSL-encrypted TCP/IP socket, and "hostnossl" is a plain TCP/IP socket.
-#
-# DATABASE can be "all", "sameuser", "samerole", a database name, or
-# a comma-separated list thereof.
-#
-# USER can be "all", a user name, a group name prefixed with "+", or
-# a comma-separated list thereof.  In both the DATABASE and USER fields
-# you can also write a file name prefixed with "@" to include names from
-# a separate file.
-#
-# CIDR-ADDRESS specifies the set of hosts the record matches.
-# It is made up of an IP address and a CIDR mask that is an integer
-# (between 0 and 32 (IPv4) or 128 (IPv6) inclusive) that specifies
-# the number of significant bits in the mask.  Alternatively, you can write
-# an IP address and netmask in separate columns to specify the set of hosts.
-#
-# METHOD can be "trust", "reject", "md5", "password", "gss", "sspi", "krb5",
-# "ident", "pam", "ldap" or "cert".  Note that "password" sends passwords
-# in clear text; "md5" is preferred since it sends encrypted passwords.
-#
-# OPTIONS are a set of options for the authentication in the format
-# NAME=VALUE. The available options depend on the different authentication
-# methods - refer to the "Client Authentication" section in the documentation
-# for a list of which options are available for which authentication methods.
-#
-# Database and user names containing spaces, commas, quotes and other special
-# characters must be quoted. Quoting one of the keywords "all", "sameuser" or
-# "samerole" makes the name lose its special character, and just match a
-# database or username with that name.
-#
-# This file is read on server startup and when the postmaster receives
-# a SIGHUP signal.  If you edit the file on a running system, you have
-# to SIGHUP the postmaster for the changes to take effect.  You can use
-# "pg_ctl reload" to do that.
-
-# Put your actual configuration here
-# ----------------------------------
-#
-# If you want to allow non-local connections, you need to add more
-# "host" records. In that case you will also need to make PostgreSQL listen
-# on a non-local interface via the listen_addresses configuration parameter,
-# or via the -i or -h command line switches.
-#
-
-
-
-# TYPE  DATABASE    USER        CIDR-ADDRESS          METHOD
-
-# "local" is for Unix domain socket connections only
-local   all         all                               trust
-# IPv4 local connections:
-host    all         all         127.0.0.1/32          trust
-# IPv6 local connections:
-host    all         all         ::1/128               trust
diff --git a/conf/vagrant/var/www/dataverse/error-documents/503.html b/conf/vagrant/var/www/dataverse/error-documents/503.html
deleted file mode 100644
index 95a7dea4107..00000000000
--- a/conf/vagrant/var/www/dataverse/error-documents/503.html
+++ /dev/null
@@ -1 +0,0 @@
-<p>Custom "site is unavailable" 503 page.</p>
diff --git a/doc/release-notes/6.0-release-notes.md b/doc/release-notes/6.0-release-notes.md
new file mode 100644
index 00000000000..df916216f5b
--- /dev/null
+++ b/doc/release-notes/6.0-release-notes.md
@@ -0,0 +1,300 @@
+# Dataverse 6.0
+
+This is a platform upgrade release. Payara, Solr, and Java have been upgraded. No features have been added to the Dataverse software itself. Only a handful of bugs were fixed.
+
+Thank you to all of the community members who contributed code, suggestions, bug reports, and other assistance across the project!
+
+## Release Highlights (Major Upgrades, Breaking Changes)
+
+This release contains major upgrades to core components. Detailed upgrade instructions can be found below.
+
+### Runtime
+
+- The required Java version has been increased from version 11 to 17.
+    - See PR #9764 for details.
+- Payara application server has been upgraded to version 6.2023.8.
+    - This is a required update.
+    - Please note that Payara Community 5 has reached [end of life](https://www.payara.fish/products/payara-platform-product-lifecycle/).
+    - See PR #9685 and PR #9795 for details.
+- Solr has been upgraded to version 9.3.0.
+    - See PR #9787 for details.
+- PostgreSQL 13 remains the tested and supported version.
+    - That said, the installer and Flyway have been upgraded to support PostgreSQL 14 and 15. See the [PostgreSQL](https://guides.dataverse.org/en/6.0/installation/prerequisites.html#postgresql) section of the Installation Guide and PR #9877 for details.
+
+### Development
+
+- Removal of Vagrant and Docker All In One (docker-aio), deprecated in Dataverse v5.14. See PR #9838 and PR #9685 for details.
+- All tests have been migrated to use JUnit 5 exclusively from now on. See PR #9796 for details.
+
+## Installation
+
+If this is a new installation, please follow our [Installation Guide](https://guides.dataverse.org/en/latest/installation/). Please don't be shy about [asking for help](https://guides.dataverse.org/en/latest/installation/intro.html#getting-help) if you need it!
+
+Once you are in production, we would be delighted to update our [map of Dataverse installations](https://dataverse.org/installations) around the world to include yours! Please [create an issue](https://github.com/IQSS/dataverse-installations/issues) or email us at support@dataverse.org to join the club!
+
+You are also very welcome to join the [Global Dataverse Community Consortium](https://dataversecommunity.global) (GDCC).
+
+## Upgrade Instructions
+
+Upgrading requires a maintenance window and downtime. Please plan ahead, create backups of your database, etc.
+
+These instructions assume that you've already upgraded through all the 5.x releases and are now running Dataverse 5.14.
+
+### Upgrade from Java 11 to Java 17
+
+Java 17 is now required for Dataverse. Solr can run under Java 11 or Java 17 but the latter is recommended. In preparation for the Java upgrade, stop both Dataverse/Payara and Solr.
+
+1. Undeploy Dataverse, if deployed, using the unprivileged service account.
+
+   `sudo -u dataverse /usr/local/payara5/bin/asadmin list-applications`
+
+   `sudo -u dataverse /usr/local/payara5/bin/asadmin undeploy dataverse-5.14`
+
+1. Stop Payara 5.
+
+   `sudo -u dataverse /usr/local/payara5/bin/asadmin stop-domain`
+
+1. Stop Solr 8.
+
+   `sudo systemctl stop solr.service`
+
+1. Install Java 17.
+
+   Assuming you are using RHEL or a derivative such as Rocky Linux:
+
+   `sudo yum install java-17-openjdk`
+
+1. Set Java 17 as the default.
+
+   Assuming you are using RHEL or a derivative such as Rocky Linux:
+
+   `sudo alternatives --config java`
+
+1. Test that Java 17 is the default.
+
+   `java -version`
+
+### Upgrade from Payara 5 to Payara 6
+
+If you are running Payara as a non-root user (and you should be!), **remember not to execute the commands below as root**. Use `sudo` to change to that user first. For example, `sudo -i -u dataverse` if `dataverse` is your dedicated application user.
+
+1. Download Payara 6.2023.8.
+
+   `curl -L -O https://nexus.payara.fish/repository/payara-community/fish/payara/distributions/payara/6.2023.8/payara-6.2023.8.zip`
+
+1. Unzip it to /usr/local (or your preferred location).
+
+   `sudo unzip payara-6.2023.8.zip -d /usr/local/`
+
+1. Change ownership of the unzipped Payara to your "service" user ("dataverse" by default).
+
+   `sudo chown -R dataverse /usr/local/payara6`
+
+1. Undeploy Dataverse, if deployed, using the unprivileged service account.
+
+   `sudo -u dataverse /usr/local/payara5/bin/asadmin list-applications`
+
+   `sudo -u dataverse /usr/local/payara5/bin/asadmin undeploy dataverse-5.14`
+
+1. Stop Payara 5, if running.
+
+   `sudo -u dataverse /usr/local/payara5/bin/asadmin stop-domain`
+
+1. Copy Dataverse-related lines from Payara 5 to Payara 6 domain.xml.
+
+   `sudo -u dataverse cp /usr/local/payara6/glassfish/domains/domain1/config/domain.xml /usr/local/payara6/glassfish/domains/domain1/config/domain.xml.orig`
+
+   `sudo egrep 'dataverse|doi' /usr/local/payara5/glassfish/domains/domain1/config/domain.xml > lines.txt`
+
+   `sudo vi /usr/local/payara6/glassfish/domains/domain1/config/domain.xml`
+
+   The lines will appear in two sections, examples shown below (but your content will vary).
+
+   Section 1: system properties (under `<server name="server" config-ref="server-config">`)
+
+   ```
+   <system-property name="dataverse.db.user" value="dvnuser"></system-property>
+   <system-property name="dataverse.db.host" value="localhost"></system-property>
+   <system-property name="dataverse.db.port" value="5432"></system-property>
+   <system-property name="dataverse.db.name" value="dvndb"></system-property>
+   <system-property name="dataverse.db.password" value="dvnsecret"></system-property>
+   ```
+
+   Note: if you used the Dataverse installer, you won't have a `dataverse.db.password` property. See "Create password aliases" below.
+
+   Section 2: JVM options (under `<java-config classpath-suffix="" debug-options="-agentlib:jdwp=transport=dt_socket,server=y,suspend=n,address=9009" system-classpath="">`, the one under `<config name="server-config">`, not under `<config name="default-config">`)
+
+   ```
+   <jvm-options>-Ddataverse.files.directory=/usr/local/dvn/data</jvm-options>
+   <jvm-options>-Ddataverse.files.file.type=file</jvm-options>
+   <jvm-options>-Ddataverse.files.file.label=file</jvm-options>
+   <jvm-options>-Ddataverse.files.file.directory=/usr/local/dvn/data</jvm-options>
+   <jvm-options>-Ddataverse.rserve.host=localhost</jvm-options>
+   <jvm-options>-Ddataverse.rserve.port=6311</jvm-options>
+   <jvm-options>-Ddataverse.rserve.user=rserve</jvm-options>
+   <jvm-options>-Ddataverse.rserve.password=rserve</jvm-options>
+   <jvm-options>-Ddataverse.auth.password-reset-timeout-in-minutes=60</jvm-options>
+   <jvm-options>-Ddataverse.timerServer=true</jvm-options>
+   <jvm-options>-Ddataverse.fqdn=dev1.dataverse.org</jvm-options>
+   <jvm-options>-Ddataverse.siteUrl=https://dev1.dataverse.org</jvm-options>
+   <jvm-options>-Ddataverse.files.storage-driver-id=file</jvm-options>
+   <jvm-options>-Ddoi.username=testaccount</jvm-options>
+   <jvm-options>-Ddoi.password=notmypassword</jvm-options>
+   <jvm-options>-Ddoi.baseurlstring=https://mds.test.datacite.org/</jvm-options>
+   <jvm-options>-Ddoi.dataciterestapiurlstring=https://api.test.datacite.org</jvm-options>
+   ```
+
+1. Check the `Xmx` setting in `domain.xml`.
+
+   Under `/usr/local/payara6/glassfish/domains/domain1/config/domain.xml`, check the `Xmx` setting under `<config name="server-config">`, where you put the JVM options, not the one under `<config name="default-config">`. Note that there are two such settings, and you want to adjust the one in the stanza with Dataverse options. This sets the JVM heap size; a good rule of thumb is half of your system's total RAM. You may specify the value in MB (`8192m`) or GB (`8g`).
+
+1. Copy `jhove.conf` and `jhoveConfig.xsd` from Payara 5, edit and change `payara5` to `payara6`.
+
+   `sudo cp /usr/local/payara5/glassfish/domains/domain1/config/jhove* /usr/local/payara6/glassfish/domains/domain1/config/`
+
+   `sudo chown dataverse /usr/local/payara6/glassfish/domains/domain1/config/jhove*`
+
+   `sudo -u dataverse vi /usr/local/payara6/glassfish/domains/domain1/config/jhove.conf`
+
+1. Copy logos from Payara 5 to Payara 6.
+
+   These logos are for collections (dataverses).
+
+   `sudo -u dataverse cp -r /usr/local/payara5/glassfish/domains/domain1/docroot/logos /usr/local/payara6/glassfish/domains/domain1/docroot`
+
+1. If you are using Make Data Count (MDC), edit `:MDCLogPath`.
+
+   Your `:MDCLogPath` database setting might be pointing to a Payara 5 directory such as `/usr/local/payara5/glassfish/domains/domain1/logs`. If so, edit this to be Payara 6. You'll probably want to copy your logs over as well.
+
+1. Update systemd unit file (or other init system) from `/usr/local/payara5` to `/usr/local/payara6`, if applicable.
+
+1. Start Payara.
+
+   `sudo -u dataverse /usr/local/payara6/bin/asadmin start-domain`
+
+1. Create a Java mail resource, replacing "localhost" for mailhost with your mail relay server, and replacing "localhost" for fromaddress with the FQDN of your Dataverse server.
+
+   `sudo -u dataverse /usr/local/payara6/bin/asadmin create-javamail-resource --mailhost "localhost" --mailuser "dataversenotify" --fromaddress "do-not-reply@localhost" mail/notifyMailSession`
+
+1. Create password aliases for your database, rserve and datacite jvm-options, if you're using them.
+
+   `echo "AS_ADMIN_ALIASPASSWORD=yourDBpassword" > /tmp/dataverse.db.password.txt`
+
+   `sudo -u dataverse /usr/local/payara6/bin/asadmin create-password-alias --passwordfile /tmp/dataverse.db.password.txt`
+
+   When you are prompted "Enter the value for the aliasname operand", enter `dataverse.db.password`
+
+   You should see "Command create-password-alias executed successfully."
+
+   You'll want to perform similar commands for `rserve_password_alias` and `doi_password_alias` if you're using Rserve and/or DataCite.
+
+1. Enable workaround for FISH-7722.
+
+   The following workaround is for https://github.com/payara/Payara/issues/6337
+
+   `sudo -u dataverse /usr/local/payara6/bin/asadmin create-jvm-options --add-opens=java.base/java.io=ALL-UNNAMED`
+
+1. Create the network listener on port 8009.
+
+   `sudo -u dataverse /usr/local/payara6/bin/asadmin create-network-listener --protocol http-listener-1 --listenerport 8009 --jkenabled true jk-connector`
+
+1. Deploy the Dataverse 6.0 war file.
+
+   `sudo -u dataverse /usr/local/payara6/bin/asadmin deploy /path/to/dataverse-6.0.war`
+
+1. Check that you get a version number from Dataverse.
+
+   This is just a sanity check that Dataverse has been deployed properly.
+
+   `curl http://localhost:8080/api/info/version`
+
+1. Perform one final Payara restart to ensure that timers are initialized properly.
+
+   `sudo -u dataverse /usr/local/payara6/bin/asadmin stop-domain`
+
+   `sudo -u dataverse /usr/local/payara6/bin/asadmin start-domain`
+
+### Upgrade from Solr 8 to 9
+
+Solr has been upgraded to Solr 9. You must install Solr fresh and reindex. You cannot use your old `schema.xml` because the format has changed.
+
+The instructions below are copied from https://guides.dataverse.org/en/6.0/installation/prerequisites.html#installing-solr and tweaked a bit for an upgrade scenario.
+
+We assume that you already have a user called "solr" (from the instructions above), added during your initial installation of Solr. We also assume that you have already stopped Solr 8 as explained in the instructions above about upgrading Java.
+
+1. Become the "solr" user and then download and configure Solr.
+
+   `su - solr`
+
+   `cd /usr/local/solr`
+
+   `wget https://archive.apache.org/dist/solr/solr/9.3.0/solr-9.3.0.tgz`
+
+   `tar xvzf solr-9.3.0.tgz`
+
+   `cd solr-9.3.0`
+
+   `cp -r server/solr/configsets/_default server/solr/collection1`
+
+1. Unzip "dvinstall.zip" from this release into /tmp. Then copy the following files into place.
+
+   `cp /tmp/dvinstall/schema*.xml /usr/local/solr/solr-9.3.0/server/solr/collection1/conf`
+
+   `cp /tmp/dvinstall/solrconfig.xml /usr/local/solr/solr-9.3.0/server/solr/collection1/conf`
+
+1. A Dataverse installation requires a change to the jetty.xml file that ships with Solr.
+
+   Edit `/usr/local/solr/solr-9.3.0/server/etc/jetty.xml`, increasing `requestHeaderSize` from `8192` to `102400`
+
+1. Tell Solr to create the core "collection1" on startup.
+
+   `echo "name=collection1" > /usr/local/solr/solr-9.3.0/server/solr/collection1/core.properties`
+
+1. Update your init script.
+
+   Your init script may be located at `/etc/systemd/system/solr.service`, for example. Update the path to Solr to be `/usr/local/solr/solr-9.3.0`.
+
+1. Start Solr using your init script and check collection1.
+
+   The collection1 check below should print out fields Dataverse uses like "dsDescription".
+
+   `systemctl start solr.service`
+
+   `curl http://localhost:8983/solr/collection1/schema/fields`
+
+1. Reindex Solr.
+
+   For details, see https://guides.dataverse.org/en/6.0/admin/solr-search-index.html but here is the reindex command:
+
+   `curl http://localhost:8080/api/admin/index`
+
+1. If you have custom metadata blocks installed, you must update your Solr `schema.xml` to include your custom fields.
+
+   For details, please see https://guides.dataverse.org/en/6.0/admin/metadatacustomization.html#updating-the-solr-schema
+
+   At a high level you will be copying custom fields from the output of http://localhost:8080/api/admin/index/solr/schema or using a script to automate this.
+
+## Potential Archiver Incompatibilities with Payara 6
+
+The [Google Cloud and DuraCloud archivers](https://guides.dataverse.org/en/5.14/installation/config.html#bagit-export) may not work in Dataverse 6.0.
+
+This is due to the archivers' dependence on libraries that include classes in `javax.*` packages that are no longer available. If these classes are actually used when the archivers run, the archivers would fail. As these two archivers require additional setup, they have not been tested in 6.0. Community members using these archivers or considering their use are encouraged to test them with 6.0 and report any errors and/or provide fixes for them that can be included in future releases.
+
+## Bug Fix for Dataset Templates with Custom Terms of Use
+
+A bug was fixed for the following scenario:
+
+- Create a template with custom terms.
+- Set that template as the default.
+- Try to create a dataset.
+- A 500 error appears before the form to create a dataset is even shown.
+
+For more details, see issue #9825 and PR #9892.
+
+## Complete List of Changes
+
+For the complete list of code changes in this release, see the [6.0 Milestone](https://github.com/IQSS/dataverse/milestone/109?closed=1) in GitHub.
+
+## Getting Help
+
+For help with upgrading, installing, or general questions please post to the [Dataverse Community Google Group](https://groups.google.com/forum/#!forum/dataverse-community) or email support@dataverse.org.
diff --git a/doc/shib/shib.md b/doc/shib/shib.md
index 2c178a93f35..9cff6d827e7 100644
--- a/doc/shib/shib.md
+++ b/doc/shib/shib.md
@@ -82,11 +82,7 @@ Run `service httpd restart`.
 
 ## Update/verify files under /etc/shibboleth
 
-For /etc/shibboleth/shibboleth2.xml use the version from https://github.com/IQSS/dataverse/blob/master/conf/vagrant/etc/shibboleth/shibboleth2.xml but replace "pdurbin.pagekite.me" with the "shibtest.dataverse.org".
-
-Put https://github.com/IQSS/dataverse/blob/master/conf/vagrant/etc/shibboleth/dataverse-idp-metadata.xml at /etc/shibboleth/dataverse-idp-metadata.xml
-
-Put https://github.com/IQSS/dataverse/blob/master/conf/vagrant/etc/shibboleth/attribute-map.xml at 
+Get files from the Installation Guide.
 
 After making these changes, run `service shibd restart` and `service httpd restart`.
 
diff --git a/doc/sphinx-guides/source/_static/admin/counter-processor-config.yaml b/doc/sphinx-guides/source/_static/admin/counter-processor-config.yaml
index 4f338905751..26144544d9e 100644
--- a/doc/sphinx-guides/source/_static/admin/counter-processor-config.yaml
+++ b/doc/sphinx-guides/source/_static/admin/counter-processor-config.yaml
@@ -1,8 +1,8 @@
 # currently no other option but to have daily logs and have year-month-day format in the name with
 # 4-digit year and 2-digit month and day
-# /usr/local/payara5/glassfish/domains/domain1/logs/counter_2019-01-11.log
+# /usr/local/payara6/glassfish/domains/domain1/logs/counter_2019-01-11.log
 #log_name_pattern: sample_logs/counter_(yyyy-mm-dd).log
-log_name_pattern: /usr/local/payara5/glassfish/domains/domain1/logs/mdc/counter_(yyyy-mm-dd).log
+log_name_pattern: /usr/local/payara6/glassfish/domains/domain1/logs/mdc/counter_(yyyy-mm-dd).log
 
 # path_types regular expressions allow matching to classify page urls as either an investigation or request
 # based on specific URL structure for your system.
diff --git a/scripts/vagrant/counter-processor-config.yaml b/doc/sphinx-guides/source/_static/developers/counter-processor-config.yaml
similarity index 100%
rename from scripts/vagrant/counter-processor-config.yaml
rename to doc/sphinx-guides/source/_static/developers/counter-processor-config.yaml
diff --git a/doc/sphinx-guides/source/_static/installation/files/etc/init.d/payara.init.root b/doc/sphinx-guides/source/_static/installation/files/etc/init.d/payara.init.root
index 1de94331523..b9ef9960318 100755
--- a/doc/sphinx-guides/source/_static/installation/files/etc/init.d/payara.init.root
+++ b/doc/sphinx-guides/source/_static/installation/files/etc/init.d/payara.init.root
@@ -4,7 +4,7 @@
 
 set -e
 
-ASADMIN=/usr/local/payara5/bin/asadmin
+ASADMIN=/usr/local/payara6/bin/asadmin
 
 case "$1" in
   start)
diff --git a/doc/sphinx-guides/source/_static/installation/files/etc/init.d/payara.init.service b/doc/sphinx-guides/source/_static/installation/files/etc/init.d/payara.init.service
index 7c457e615d8..19bb190e740 100755
--- a/doc/sphinx-guides/source/_static/installation/files/etc/init.d/payara.init.service
+++ b/doc/sphinx-guides/source/_static/installation/files/etc/init.d/payara.init.service
@@ -3,7 +3,7 @@
 # description: Payara App Server
 set -e
 
-ASADMIN=/usr/local/payara5/bin/asadmin
+ASADMIN=/usr/local/payara6/bin/asadmin
 APP_SERVER_USER=dataverse
 
 case "$1" in
diff --git a/doc/sphinx-guides/source/_static/installation/files/etc/init.d/solr b/doc/sphinx-guides/source/_static/installation/files/etc/init.d/solr
index 7ca04cdff3f..9cf8902eb14 100755
--- a/doc/sphinx-guides/source/_static/installation/files/etc/init.d/solr
+++ b/doc/sphinx-guides/source/_static/installation/files/etc/init.d/solr
@@ -5,7 +5,7 @@
 # chkconfig: 35 92 08
 # description: Starts and stops Apache Solr
 
-SOLR_DIR="/usr/local/solr/solr-8.11.1"
+SOLR_DIR="/usr/local/solr/solr-9.3.0"
 SOLR_COMMAND="bin/solr"
 SOLR_ARGS="-m 1g -j jetty.host=127.0.0.1"
 SOLR_USER=solr
diff --git a/doc/sphinx-guides/source/_static/installation/files/etc/systemd/payara.service b/doc/sphinx-guides/source/_static/installation/files/etc/systemd/payara.service
index c8c82f6d6b2..c8efcb9c6f9 100644
--- a/doc/sphinx-guides/source/_static/installation/files/etc/systemd/payara.service
+++ b/doc/sphinx-guides/source/_static/installation/files/etc/systemd/payara.service
@@ -4,9 +4,9 @@ After = syslog.target network.target
 
 [Service]
 Type = forking
-ExecStart = /usr/bin/java -jar /usr/local/payara5/glassfish/lib/client/appserver-cli.jar start-domain
-ExecStop = /usr/bin/java -jar /usr/local/payara5/glassfish/lib/client/appserver-cli.jar stop-domain
-ExecReload = /usr/bin/java -jar /usr/local/payara5/glassfish/lib/client/appserver-cli.jar restart-domain
+ExecStart = /usr/bin/java -jar /usr/local/payara6/glassfish/lib/client/appserver-cli.jar start-domain
+ExecStop = /usr/bin/java -jar /usr/local/payara6/glassfish/lib/client/appserver-cli.jar stop-domain
+ExecReload = /usr/bin/java -jar /usr/local/payara6/glassfish/lib/client/appserver-cli.jar restart-domain
 User=dataverse
 LimitNOFILE=32768
 Environment="LANG=en_US.UTF-8"
diff --git a/doc/sphinx-guides/source/_static/installation/files/etc/systemd/solr.service b/doc/sphinx-guides/source/_static/installation/files/etc/systemd/solr.service
index d89ee108377..0b8a8528490 100644
--- a/doc/sphinx-guides/source/_static/installation/files/etc/systemd/solr.service
+++ b/doc/sphinx-guides/source/_static/installation/files/etc/systemd/solr.service
@@ -5,9 +5,9 @@ After = syslog.target network.target remote-fs.target nss-lookup.target
 [Service]
 User = solr
 Type = forking
-WorkingDirectory = /usr/local/solr/solr-8.11.1
-ExecStart = /usr/local/solr/solr-8.11.1/bin/solr start -m 1g -j "jetty.host=127.0.0.1"
-ExecStop = /usr/local/solr/solr-8.11.1/bin/solr stop
+WorkingDirectory = /usr/local/solr/solr-9.3.0
+ExecStart = /usr/local/solr/solr-9.3.0/bin/solr start -m 1g -j "jetty.host=127.0.0.1"
+ExecStop = /usr/local/solr/solr-9.3.0/bin/solr stop
 LimitNOFILE=65000
 LimitNPROC=65000
 Restart=on-failure
diff --git a/doc/sphinx-guides/source/_static/installation/files/usr/local/payara5/glassfish/domains/domain1/config/logging.properties b/doc/sphinx-guides/source/_static/installation/files/usr/local/payara5/glassfish/domains/domain1/config/logging.properties
deleted file mode 100644
index 4054c794452..00000000000
--- a/doc/sphinx-guides/source/_static/installation/files/usr/local/payara5/glassfish/domains/domain1/config/logging.properties
+++ /dev/null
@@ -1,166 +0,0 @@
-#
-# DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS HEADER.
-#
-# Copyright (c) 2013 Oracle and/or its affiliates. All rights reserved.
-#
-# The contents of this file are subject to the terms of either the GNU
-# General Public License Version 2 only ("GPL") or the Common Development
-# and Distribution License("CDDL") (collectively, the "License").  You
-# may not use this file except in compliance with the License.  You can
-# obtain a copy of the License at
-# https://glassfish.dev.java.net/public/CDDL+GPL_1_1.html
-# or packager/legal/LICENSE.txt.  See the License for the specific
-# language governing permissions and limitations under the License.
-#
-# When distributing the software, include this License Header Notice in each
-# file and include the License file at packager/legal/LICENSE.txt.
-#
-# GPL Classpath Exception:
-# Oracle designates this particular file as subject to the "Classpath"
-# exception as provided by Oracle in the GPL Version 2 section of the License
-# file that accompanied this code.
-#
-# Modifications:
-# If applicable, add the following below the License Header, with the fields
-# enclosed by brackets [] replaced by your own identifying information:
-# "Portions Copyright [year] [name of copyright owner]"
-#
-# Contributor(s):
-# If you wish your version of this file to be governed by only the CDDL or
-# only the GPL Version 2, indicate your decision by adding "[Contributor]
-# elects to include this software in this distribution under the [CDDL or GPL
-# Version 2] license."  If you don't indicate a single choice of license, a
-# recipient has the option to distribute your version of this file under
-# either the CDDL, the GPL Version 2 or to extend the choice of license to
-# its licensees as provided above.  However, if you add GPL Version 2 code
-# and therefore, elected the GPL Version 2 license, then the option applies
-# only if the new code is made subject to such option by the copyright
-# holder.
-#
-# Portions Copyright [2016-2021] [Payara Foundation and/or its affiliates]
-
-#GlassFish logging.properties list
-#Update June 13 2012
-
-#All attributes details
-handlers=java.util.logging.ConsoleHandler
-handlerServices=com.sun.enterprise.server.logging.GFFileHandler,com.sun.enterprise.server.logging.SyslogHandler
-java.util.logging.ConsoleHandler.formatter=com.sun.enterprise.server.logging.UniformLogFormatter
-java.util.logging.FileHandler.count=1
-java.util.logging.FileHandler.formatter=java.util.logging.XMLFormatter
-java.util.logging.FileHandler.limit=50000
-java.util.logging.FileHandler.pattern=%h/java%u.log
-com.sun.enterprise.server.logging.GFFileHandler.compressOnRotation=false
-com.sun.enterprise.server.logging.GFFileHandler.excludeFields=
-com.sun.enterprise.server.logging.GFFileHandler.file=${com.sun.aas.instanceRoot}/logs/server.log
-com.sun.enterprise.server.logging.GFFileHandler.flushFrequency=1
-com.sun.enterprise.server.logging.GFFileHandler.formatter=com.sun.enterprise.server.logging.ODLLogFormatter
-com.sun.enterprise.server.logging.GFFileHandler.level=ALL
-com.sun.enterprise.server.logging.GFFileHandler.logStandardStreams=true
-com.sun.enterprise.server.logging.GFFileHandler.logtoConsole=false
-com.sun.enterprise.server.logging.GFFileHandler.logtoFile=true
-com.sun.enterprise.server.logging.GFFileHandler.maxHistoryFiles=0
-com.sun.enterprise.server.logging.GFFileHandler.multiLineMode=true
-com.sun.enterprise.server.logging.GFFileHandler.retainErrorsStasticsForHours=0
-com.sun.enterprise.server.logging.GFFileHandler.rotationLimitInBytes=2000000
-com.sun.enterprise.server.logging.GFFileHandler.rotationOnDateChange=false
-com.sun.enterprise.server.logging.GFFileHandler.rotationTimelimitInMinutes=0
-com.sun.enterprise.server.logging.SyslogHandler.level=ALL
-com.sun.enterprise.server.logging.SyslogHandler.useSystemLogging=false
-log4j.logger.org.hibernate.validator.util.Version=warn
-com.sun.enterprise.server.logging.UniformLogFormatter.ansiColor=true
-
-#Payara Notification logging properties
-fish.payara.enterprise.server.logging.PayaraNotificationFileHandler.compressOnRotation=false
-fish.payara.enterprise.server.logging.PayaraNotificationFileHandler.file=${com.sun.aas.instanceRoot}/logs/notification.log
-fish.payara.enterprise.server.logging.PayaraNotificationFileHandler.formatter=com.sun.enterprise.server.logging.ODLLogFormatter
-fish.payara.enterprise.server.logging.PayaraNotificationFileHandler.logtoFile=true
-fish.payara.enterprise.server.logging.PayaraNotificationFileHandler.maxHistoryFiles=0
-fish.payara.enterprise.server.logging.PayaraNotificationFileHandler.rotationLimitInBytes=2000000
-fish.payara.enterprise.server.logging.PayaraNotificationFileHandler.rotationOnDateChange=false
-fish.payara.enterprise.server.logging.PayaraNotificationFileHandler.rotationTimelimitInMinutes=0
-fish.payara.deprecated.jsonlogformatter.underscoreprefix=false
-
-#All log level details
-
-.level=INFO
-ShoalLogger.level=CONFIG
-com.hazelcast.level=WARNING
-java.util.logging.ConsoleHandler.level=FINEST
-javax.enterprise.resource.corba.level=INFO
-javax.enterprise.resource.javamail.level=INFO
-javax.enterprise.resource.jdo.level=INFO
-javax.enterprise.resource.jms.level=INFO
-javax.enterprise.resource.jta.level=INFO
-javax.enterprise.resource.resourceadapter.level=INFO
-javax.enterprise.resource.sqltrace.level=FINE
-javax.enterprise.resource.webcontainer.jsf.application.level=INFO
-javax.enterprise.resource.webcontainer.jsf.config.level=INFO
-javax.enterprise.resource.webcontainer.jsf.context.level=INFO
-javax.enterprise.resource.webcontainer.jsf.facelets.level=INFO
-javax.enterprise.resource.webcontainer.jsf.lifecycle.level=INFO
-javax.enterprise.resource.webcontainer.jsf.managedbean.level=INFO
-javax.enterprise.resource.webcontainer.jsf.renderkit.level=INFO
-javax.enterprise.resource.webcontainer.jsf.resource.level=INFO
-javax.enterprise.resource.webcontainer.jsf.taglib.level=INFO
-javax.enterprise.resource.webcontainer.jsf.timing.level=INFO
-javax.enterprise.system.container.cmp.level=INFO
-javax.enterprise.system.container.ejb.level=INFO
-javax.enterprise.system.container.ejb.mdb.level=INFO
-javax.enterprise.system.container.web.level=INFO
-javax.enterprise.system.core.classloading.level=INFO
-javax.enterprise.system.core.config.level=INFO
-javax.enterprise.system.core.level=INFO
-javax.enterprise.system.core.security.level=INFO
-javax.enterprise.system.core.selfmanagement.level=INFO
-javax.enterprise.system.core.transaction.level=INFO
-javax.enterprise.system.level=INFO
-javax.enterprise.system.ssl.security.level=INFO
-javax.enterprise.system.tools.admin.level=INFO
-javax.enterprise.system.tools.backup.level=INFO
-javax.enterprise.system.tools.deployment.common.level=WARNING
-javax.enterprise.system.tools.deployment.dol.level=WARNING
-javax.enterprise.system.tools.deployment.level=INFO
-javax.enterprise.system.util.level=INFO
-javax.enterprise.system.webservices.registry.level=INFO
-javax.enterprise.system.webservices.rpc.level=INFO
-javax.enterprise.system.webservices.saaj.level=INFO
-javax.level=INFO
-javax.mail.level=INFO
-javax.org.glassfish.persistence.level=INFO
-org.apache.catalina.level=INFO
-org.apache.coyote.level=INFO
-org.apache.jasper.level=INFO
-org.eclipse.persistence.session.level=INFO
-org.glassfish.admingui.level=INFO
-org.glassfish.naming.level=INFO
-org.jvnet.hk2.osgiadapter.level=INFO
-
-javax.enterprise.resource.corba.level=INFO
-javax.enterprise.resource.jta.level=INFO
-javax.enterprise.system.webservices.saaj.level=INFO
-javax.enterprise.system.container.ejb.level=INFO
-javax.enterprise.system.container.ejb.mdb.level=INFO
-javax.enterprise.resource.javamail.level=INFO
-javax.enterprise.system.webservices.rpc.level=INFO
-javax.enterprise.system.container.web.level=INFO
-javax.enterprise.resource.jms.level=INFO
-javax.enterprise.system.webservices.registry.level=INFO
-javax.enterprise.resource.webcontainer.jsf.application.level=INFO
-javax.enterprise.resource.webcontainer.jsf.resource.level=INFO
-javax.enterprise.resource.webcontainer.jsf.config.level=INFO
-javax.enterprise.resource.webcontainer.jsf.context.level=INFO
-javax.enterprise.resource.webcontainer.jsf.facelets.level=INFO
-javax.enterprise.resource.webcontainer.jsf.lifecycle.level=INFO
-javax.enterprise.resource.webcontainer.jsf.managedbean.level=INFO
-javax.enterprise.resource.webcontainer.jsf.renderkit.level=INFO
-javax.enterprise.resource.webcontainer.jsf.taglib.level=INFO
-javax.enterprise.resource.webcontainer.jsf.timing.level=INFO
-javax.org.glassfish.persistence.level=INFO
-javax.enterprise.system.tools.backup.level=INFO
-javax.mail.level=INFO
-org.glassfish.admingui.level=INFO
-org.glassfish.naming.level=INFO
-org.eclipse.persistence.session.level=INFO
-javax.enterprise.system.tools.deployment.dol.level=WARNING
-javax.enterprise.system.tools.deployment.common.level=WARNING
diff --git a/doc/sphinx-guides/source/_static/util/clear_timer.sh b/doc/sphinx-guides/source/_static/util/clear_timer.sh
index 1d9966e4e07..641b2695084 100755
--- a/doc/sphinx-guides/source/_static/util/clear_timer.sh
+++ b/doc/sphinx-guides/source/_static/util/clear_timer.sh
@@ -8,7 +8,7 @@
 # if you'd like to avoid that.
 
 # directory where Payara is installed
-PAYARA_DIR=/usr/local/payara5
+PAYARA_DIR=/usr/local/payara6
 
 # directory within Payara (defaults)
 DV_DIR=${PAYARA_DIR}/glassfish/domains/domain1
diff --git a/doc/sphinx-guides/source/_static/util/counter_daily.sh b/doc/sphinx-guides/source/_static/util/counter_daily.sh
index a12439d9cf8..674972b18f2 100644
--- a/doc/sphinx-guides/source/_static/util/counter_daily.sh
+++ b/doc/sphinx-guides/source/_static/util/counter_daily.sh
@@ -1,7 +1,7 @@
 #! /bin/bash
 
 COUNTER_PROCESSOR_DIRECTORY="/usr/local/counter-processor-0.1.04"
-MDC_LOG_DIRECTORY="/usr/local/payara5/glassfish/domains/domain1/logs/mdc"
+MDC_LOG_DIRECTORY="/usr/local/payara6/glassfish/domains/domain1/logs/mdc"
 
 # counter_daily.sh
 
diff --git a/doc/sphinx-guides/source/admin/harvestclients.rst b/doc/sphinx-guides/source/admin/harvestclients.rst
index da922459f46..59fc4dc2c64 100644
--- a/doc/sphinx-guides/source/admin/harvestclients.rst
+++ b/doc/sphinx-guides/source/admin/harvestclients.rst
@@ -35,8 +35,8 @@ For example:
 
 .. code-block:: bash
 
-  sudo touch /usr/local/payara5/glassfish/domains/domain1/logs/stopharvest_bigarchive.70916
-  sudo chown dataverse /usr/local/payara5/glassfish/domains/domain1/logs/stopharvest_bigarchive.70916
+  sudo touch /usr/local/payara6/glassfish/domains/domain1/logs/stopharvest_bigarchive.70916
+  sudo chown dataverse /usr/local/payara6/glassfish/domains/domain1/logs/stopharvest_bigarchive.70916
 
 Note: If the application server is stopped and restarted, any running harvesting jobs will be killed but may remain marked as in progress in the database. We thus recommend using the mechanism here to stop ongoing harvests prior to a server restart.
 
@@ -44,6 +44,6 @@ Note: If the application server is stopped and restarted, any running harvesting
 What if a Run Fails?
 ~~~~~~~~~~~~~~~~~~~~
 
-Each harvesting client run logs a separate file per run to the app server's default logging directory (``/usr/local/payara5/glassfish/domains/domain1/logs/`` unless you've changed it). Look for filenames in the format  ``harvest_TARGET_YYYY_MM_DD_timestamp.log`` to get a better idea of what's going wrong.
+Each harvesting client run logs a separate file per run to the app server's default logging directory (``/usr/local/payara6/glassfish/domains/domain1/logs/`` unless you've changed it). Look for filenames in the format  ``harvest_TARGET_YYYY_MM_DD_timestamp.log`` to get a better idea of what's going wrong.
 
 Note that you'll want to run a minimum of Dataverse Software 4.6, optimally 4.18 or beyond, for the best OAI-PMH interoperability.
diff --git a/doc/sphinx-guides/source/admin/make-data-count.rst b/doc/sphinx-guides/source/admin/make-data-count.rst
index ec6ff9a685b..fe32af6649a 100644
--- a/doc/sphinx-guides/source/admin/make-data-count.rst
+++ b/doc/sphinx-guides/source/admin/make-data-count.rst
@@ -72,10 +72,10 @@ Enable or Disable Display of Make Data Count Metrics
 
 By default, when MDC logging is enabled (when ``:MDCLogPath`` is set), your Dataverse installation will display MDC metrics instead of it's internal (legacy) metrics. You can avoid this (e.g. to collect MDC metrics for some period of time before starting to display them) by setting ``:DisplayMDCMetrics`` to false.
 
+The following discussion assumes ``:MDCLogPath`` has been set to ``/usr/local/payara6/glassfish/domains/domain1/logs/mdc``
+
 You can also decide to display MDC metrics along with Dataverse's traditional download counts from the time before MDC was enabled. To do this, set the :ref:`:MDCStartDate` to when you started MDC logging.
 
-The following discussion assumes ``:MDCLogPath`` has been set to ``/usr/local/payara5/glassfish/domains/domain1/logs/mdc``
-
 Configure Counter Processor
 ~~~~~~~~~~~~~~~~~~~~~~~~~~~
 
@@ -104,7 +103,7 @@ Soon we will be setting up a cron job to run nightly but we start with a single
 
 * If you are running Counter Processor for the first time in the middle of a month, you will need create blank log files for the previous days. e.g.:
 
-  * ``cd /usr/local/payara5/glassfish/domains/domain1/logs/mdc``
+  * ``cd /usr/local/payara6/glassfish/domains/domain1/logs/mdc``
 
   * ``touch counter_2019-02-01.log``
   
diff --git a/doc/sphinx-guides/source/admin/metadatacustomization.rst b/doc/sphinx-guides/source/admin/metadatacustomization.rst
index cac051ddb59..4f737bd730b 100644
--- a/doc/sphinx-guides/source/admin/metadatacustomization.rst
+++ b/doc/sphinx-guides/source/admin/metadatacustomization.rst
@@ -413,13 +413,10 @@ Setting Up a Dev Environment for Testing
 
 You have several options for setting up a dev environment for testing metadata block changes:
 
-- Vagrant: See the :doc:`/developers/tools` section of the Developer Guide.
-- docker-aio: See https://github.com/IQSS/dataverse/tree/develop/conf/docker-aio
+- Docker: See :doc:`/container/index`.
 - AWS deployment: See the :doc:`/developers/deployment` section of the Developer Guide.
 - Full dev environment: See the :doc:`/developers/dev-environment` section of the Developer Guide.
 
-To get a clean environment in Vagrant, you'll be running ``vagrant destroy``. In Docker, you'll use ``docker rm``. For a full dev environment or AWS installation, you might find ``rebuild`` and related scripts at ``scripts/deploy/phoenix.dataverse.org`` useful.
-
 Editing TSV files
 ~~~~~~~~~~~~~~~~~
 
@@ -516,7 +513,7 @@ the Solr schema configuration, including any enabled metadata schemas:
 
 ``curl "http://localhost:8080/api/admin/index/solr/schema"``
 
-You can use :download:`update-fields.sh <../../../../conf/solr/8.11.1/update-fields.sh>` to easily add these to the
+You can use :download:`update-fields.sh <../../../../conf/solr/9.3.0/update-fields.sh>` to easily add these to the
 Solr schema you installed for your Dataverse installation.
 
 The script needs a target XML file containing your Solr schema. (See the :doc:`/installation/prerequisites/` section of
@@ -540,7 +537,7 @@ from some place else than your Dataverse installation).
 Please note that reconfigurations of your Solr index might require a re-index. Usually release notes indicate
 a necessary re-index, but for your custom metadata you will need to keep track on your own.
 
-Please note also that if you are going to make a pull request updating ``conf/solr/8.11.1/schema.xml`` with fields you have
+Please note also that if you are going to make a pull request updating ``conf/solr/9.3.0/schema.xml`` with fields you have
 added, you should first load all the custom metadata blocks in ``scripts/api/data/metadatablocks`` (including ones you
 don't care about) to create a complete list of fields. (This might change in the future.)
 
diff --git a/doc/sphinx-guides/source/admin/troubleshooting.rst b/doc/sphinx-guides/source/admin/troubleshooting.rst
index 9f085ba90cd..acbdcaae17e 100644
--- a/doc/sphinx-guides/source/admin/troubleshooting.rst
+++ b/doc/sphinx-guides/source/admin/troubleshooting.rst
@@ -53,15 +53,13 @@ Long-Running Ingest Jobs Have Exhausted System Resources
 
 Ingest is both CPU- and memory-intensive, and depending on your system resources and the size and format of tabular data files uploaded, may render your Dataverse installation unresponsive or nearly inoperable. It is possible to cancel these jobs by purging the ingest queue.
 
-``/usr/local/payara5/mq/bin/imqcmd -u admin query dst -t q -n DataverseIngest`` will query the DataverseIngest destination. The password, unless you have changed it, matches the username.
+``/usr/local/payara6/mq/bin/imqcmd -u admin query dst -t q -n DataverseIngest`` will query the DataverseIngest destination. The password, unless you have changed it, matches the username.
 
-``/usr/local/payara5/mq/bin/imqcmd -u admin purge dst -t q -n DataverseIngest`` will purge the DataverseIngest queue, and prompt for your confirmation.
+``/usr/local/payara6/mq/bin/imqcmd -u admin purge dst -t q -n DataverseIngest`` will purge the DataverseIngest queue, and prompt for your confirmation.
 
 Finally, list destinations to verify that the purge was successful:
 
-``/usr/local/payara5/mq/bin/imqcmd -u admin list dst``
-
-If you are still running Glassfish, substitute glassfish4 for payara5 above. If you have installed your Dataverse installation in some other location, adjust the above paths accordingly.
+``/usr/local/payara6/mq/bin/imqcmd -u admin list dst``
 
 .. _troubleshooting-payara:
 
@@ -73,7 +71,7 @@ Payara
 Finding the Payara Log File
 ~~~~~~~~~~~~~~~~~~~~~~~~~~~
 
-``/usr/local/payara5/glassfish/domains/domain1/logs/server.log`` is the main place to look when you encounter problems (assuming you installed Payara in the default directory). Hopefully an error message has been logged. If there's a stack trace, it may be of interest to developers, especially they can trace line numbers back to a tagged version or commit. Send more of the stack trace (the entire file if possible) to developers who can help (see "Getting Help", below) and be sure to say which version of the Dataverse Software you have installed.
+``/usr/local/payara6/glassfish/domains/domain1/logs/server.log`` is the main place to look when you encounter problems (assuming you installed Payara in the default directory). Hopefully an error message has been logged. If there's a stack trace, it may be of interest to developers, especially if they can trace line numbers back to a tagged version or commit. Send more of the stack trace (the entire file if possible) to developers who can help (see "Getting Help", below) and be sure to say which version of the Dataverse Software you have installed.
 
 .. _increase-payara-logging:
 
diff --git a/doc/sphinx-guides/source/api/getting-started.rst b/doc/sphinx-guides/source/api/getting-started.rst
index 544f0921bd7..a6f6c259a25 100644
--- a/doc/sphinx-guides/source/api/getting-started.rst
+++ b/doc/sphinx-guides/source/api/getting-started.rst
@@ -11,7 +11,7 @@ Servers You Can Test With
 
 Rather than using a production Dataverse installation, API users are welcome to use http://demo.dataverse.org for testing. You can email support@dataverse.org if you have any trouble with this server.  
 
-If you would rather have full control over your own test server, deployments to AWS, Docker, Vagrant, and more are covered in the :doc:`/developers/index` and the :doc:`/installation/index`.
+If you would rather have full control over your own test server, deployments to AWS, Docker, and more are covered in the :doc:`/developers/index` and the :doc:`/installation/index`.
 
 Getting an API Token
 --------------------
diff --git a/doc/sphinx-guides/source/conf.py b/doc/sphinx-guides/source/conf.py
index 2c2ddf1bdf6..7ff17eb45ed 100755
--- a/doc/sphinx-guides/source/conf.py
+++ b/doc/sphinx-guides/source/conf.py
@@ -66,9 +66,9 @@
 # built documents.
 #
 # The short X.Y version.
-version = '5.14'
+version = '6.0'
 # The full version, including alpha/beta/rc tags.
-release = '5.14'
+release = '6.0'
 
 # The language for content autogenerated by Sphinx. Refer to documentation
 # for a list of supported languages.
diff --git a/doc/sphinx-guides/source/container/base-image.rst b/doc/sphinx-guides/source/container/base-image.rst
index ed06314ef0e..1a47a8fc413 100644
--- a/doc/sphinx-guides/source/container/base-image.rst
+++ b/doc/sphinx-guides/source/container/base-image.rst
@@ -41,7 +41,7 @@ Image Contents
 
 The base image provides:
 
-- `Eclipse Temurin JRE using Java 11 <https://adoptium.net/temurin/releases?version=11>`_
+- `Eclipse Temurin JRE using Java 17 <https://adoptium.net/temurin/releases?version=17>`_
 - `Payara Community Application Server <https://docs.payara.fish/community>`_
 - CLI tools necessary to run Dataverse (i. e. ``curl`` or ``jq`` - see also :doc:`../installation/prerequisites` in Installation Guide)
 - Linux tools for analysis, monitoring and so on
diff --git a/doc/sphinx-guides/source/container/dev-usage.rst b/doc/sphinx-guides/source/container/dev-usage.rst
index 3fbe55766d5..04c7eba7913 100644
--- a/doc/sphinx-guides/source/container/dev-usage.rst
+++ b/doc/sphinx-guides/source/container/dev-usage.rst
@@ -9,17 +9,7 @@ Please note! This Docker setup is not for production!
 Quickstart
 ----------
 
-First, install Java 11 and Maven.
-
-After cloning the repo, try this:
-
-``mvn -Pct clean package docker:run``
-
-After some time you should be able to log in:
-
-- url: http://localhost:8080
-- username: dataverseAdmin
-- password: admin1
+See :ref:`container-dev-quickstart`.
 
 Intro
 -----
@@ -172,7 +162,7 @@ restart the application container:
 
 Using ``docker container inspect dev_dataverse | grep Image`` you can verify the changed checksums.
 
-Using A Debugger
+Using a Debugger
 ----------------
 
 The :doc:`base-image` enables usage of the `Java Debugging Wire Protocol <https://dzone.com/articles/remote-debugging-java-applications-with-jdwp>`_
@@ -183,3 +173,8 @@ There are a lot of tutorials how to connect your IDE's debugger to a remote endp
 as the endpoint. Here are links to the most common IDEs docs on remote debugging:
 `Eclipse <https://help.eclipse.org/latest/topic/org.eclipse.jdt.doc.user/concepts/cremdbug.htm?cp=1_2_12>`_,
 `IntelliJ <https://www.jetbrains.com/help/idea/tutorial-remote-debug.html#debugger_rc>`_
+
+Building Your Own Base Image
+----------------------------
+
+If you find yourself tasked with upgrading Payara, you will need to create your own base image before running the :ref:`container-dev-quickstart`. For instructions, see :doc:`base-image`.
diff --git a/doc/sphinx-guides/source/developers/big-data-support.rst b/doc/sphinx-guides/source/developers/big-data-support.rst
index b238a7623eb..04885571a01 100644
--- a/doc/sphinx-guides/source/developers/big-data-support.rst
+++ b/doc/sphinx-guides/source/developers/big-data-support.rst
@@ -173,6 +173,8 @@ See also :ref:`Globus settings <:GlobusBasicToken>`.
 Data Capture Module (DCM)
 -------------------------
 
+Please note: The DCM feature is deprecated.
+
 Data Capture Module (DCM) is an experimental component that allows users to upload large datasets via rsync over ssh.
 
 DCM was developed and tested using Glassfish but these docs have been updated with references to Payara.
@@ -209,7 +211,7 @@ The JSON that a DCM sends to your Dataverse installation on successful checksum
    :language: json
 
 - ``status`` - The valid strings to send are ``validation passed`` and ``validation failed``.
-- ``uploadFolder`` - This is the directory on disk where your Dataverse installation should attempt to find the files that a DCM has moved into place. There should always be a ``files.sha`` file and a least one data file. ``files.sha`` is a manifest of all the data files and their checksums. The ``uploadFolder`` directory is inside the directory where data is stored for the dataset and may have the same name as the "identifier" of the persistent id (DOI or Handle). For example, you would send ``"uploadFolder": "DNXV2H"`` in the JSON file when the absolute path to this directory is ``/usr/local/payara5/glassfish/domains/domain1/files/10.5072/FK2/DNXV2H/DNXV2H``.
+- ``uploadFolder`` - This is the directory on disk where your Dataverse installation should attempt to find the files that a DCM has moved into place. There should always be a ``files.sha`` file and at least one data file. ``files.sha`` is a manifest of all the data files and their checksums. The ``uploadFolder`` directory is inside the directory where data is stored for the dataset and may have the same name as the "identifier" of the persistent id (DOI or Handle). For example, you would send ``"uploadFolder": "DNXV2H"`` in the JSON file when the absolute path to this directory is ``/usr/local/payara6/glassfish/domains/domain1/files/10.5072/FK2/DNXV2H/DNXV2H``.
 - ``totalSize`` - Your Dataverse installation will use this value to represent the total size in bytes of all the files in the "package" that's created. If 360 data files and one ``files.sha`` manifest file are in the ``uploadFolder``, this value is the sum of the 360 data files.
 
 
@@ -231,9 +233,9 @@ Add Dataverse Installation settings to use mock (same as using DCM, noted above)
 
 At this point you should be able to download a placeholder rsync script. Your Dataverse installation is then waiting for news from the DCM about if checksum validation has succeeded or not. First, you have to put files in place, which is usually the job of the DCM. You should substitute "X1METO" for the "identifier" of the dataset you create. You must also use the proper path for where you store files in your dev environment.
 
-- ``mkdir /usr/local/payara5/glassfish/domains/domain1/files/10.5072/FK2/X1METO``
-- ``mkdir /usr/local/payara5/glassfish/domains/domain1/files/10.5072/FK2/X1METO/X1METO``
-- ``cd /usr/local/payara5/glassfish/domains/domain1/files/10.5072/FK2/X1METO/X1METO``
+- ``mkdir /usr/local/payara6/glassfish/domains/domain1/files/10.5072/FK2/X1METO``
+- ``mkdir /usr/local/payara6/glassfish/domains/domain1/files/10.5072/FK2/X1METO/X1METO``
+- ``cd /usr/local/payara6/glassfish/domains/domain1/files/10.5072/FK2/X1METO/X1METO``
 - ``echo "hello" > file1.txt``
 - ``shasum file1.txt > files.sha``
 
@@ -248,104 +250,11 @@ The following low level command should only be used when troubleshooting the "im
 
 ``curl -H "X-Dataverse-key: $API_TOKEN" -X POST "$DV_BASE_URL/api/batch/jobs/import/datasets/files/$DATASET_DB_ID?uploadFolder=$UPLOAD_FOLDER&totalSize=$TOTAL_SIZE"``
 
-Steps to set up a DCM via Docker for Development
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-If you need a fully operating DCM client for development purposes, these steps will guide you to setting one up. This includes steps to set up the DCM on S3 variant.
-
-Docker Image Set-up
-^^^^^^^^^^^^^^^^^^^
-
-See https://github.com/IQSS/dataverse/blob/develop/conf/docker-dcm/readme.md
-
-- Install docker if you do not have it
-      
-Optional steps for setting up the S3 Docker DCM Variant
-^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
-
-- Before: the default bucket for DCM to hold files in S3 is named test-dcm. It is coded into `post_upload_s3.bash` (line 30). Change to a different bucket if needed.
-- Also Note: With the new support for multiple file store in the Dataverse Software, DCM requires a store with id="s3" and DCM will only work with this store.
-
-  - Add AWS bucket info to dcmsrv
-    - Add AWS credentials to ``~/.aws/credentials``
-
-      - ``[default]``
-      - ``aws_access_key_id =``
-      - ``aws_secret_access_key =``
-
-- Dataverse installation configuration (on dvsrv):
-
-  - Set S3 as the storage driver
-
-    - ``cd /opt/payara5/bin/``
-    - ``./asadmin delete-jvm-options "\-Ddataverse.files.storage-driver-id=file"``
-    - ``./asadmin create-jvm-options "\-Ddataverse.files.storage-driver-id=s3"``
-    - ``./asadmin create-jvm-options "\-Ddataverse.files.s3.type=s3"``
-    - ``./asadmin create-jvm-options "\-Ddataverse.files.s3.label=s3"``
-    
-
-  - Add AWS bucket info to your Dataverse installation
-    - Add AWS credentials to ``~/.aws/credentials``
-    
-      - ``[default]``
-      - ``aws_access_key_id =``
-      - ``aws_secret_access_key =``
-
-    - Also: set region in ``~/.aws/config`` to create a region file. Add these contents:
-
-      - ``[default]``
-      - ``region = us-east-1``
-
-  - Add the S3 bucket names to your Dataverse installation
-
-    - S3 bucket for your Dataverse installation
-
-      - ``/usr/local/payara5/glassfish/bin/asadmin create-jvm-options "-Ddataverse.files.s3.bucket-name=iqsstestdcmbucket"``
-
-    - S3 bucket for DCM (as your Dataverse installation needs to do the copy over)
-
-      - ``/usr/local/payara5/glassfish/bin/asadmin create-jvm-options "-Ddataverse.files.dcm-s3-bucket-name=test-dcm"``
-
-  - Set download method to be HTTP, as DCM downloads through S3 are over this protocol ``curl -X PUT "http://localhost:8080/api/admin/settings/:DownloadMethods" -d "native/http"``
-
-Using the DCM Docker Containers
-^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
-
-For using these commands, you will need to connect to the shell prompt inside various containers (e.g. ``docker exec -it dvsrv /bin/bash``)
-
-- Create a dataset and download rsync upload script
-
-  - connect to client container: ``docker exec -it dcm_client bash``
-  - create dataset: ``cd /mnt ; ./create.bash`` ; this will echo the database ID to stdout
-  - download transfer script: ``./get_transfer.bash $database_id_from_create_script``
-  - execute the transfer script: ``bash ./upload-${database_id_from-create_script}.bash`` , and follow instructions from script.
-
-- Run script
-
-  - e.g. ``bash ./upload-3.bash`` (``3`` being the database id from earlier commands in this example).
-
-- Manually run post upload script on dcmsrv
-
-  - for posix implementation: ``docker exec -it dcmsrv /opt/dcm/scn/post_upload.bash``
-  - for S3 implementation: ``docker exec -it dcmsrv /opt/dcm/scn/post_upload_s3.bash``
-
-Additional DCM docker development tips
-^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
-
-- You can completely blow away all the docker images with these commands (including non DCM ones!)
-  - ``docker-compose -f docmer-compose.yml down -v``
-
-- There are a few logs to tail
-
-  - dvsrv : ``tail -n 2000 -f /opt/payara5/glassfish/domains/domain1/logs/server.log``
-  - dcmsrv : ``tail -n 2000 -f /var/log/lighttpd/breakage.log``
-  - dcmsrv : ``tail -n 2000 -f /var/log/lighttpd/access.log``
-
-- You may have to restart the app server domain occasionally to deal with memory filling up. If deployment is getting reallllllly slow, its a good time.
-
 Repository Storage Abstraction Layer (RSAL)
 -------------------------------------------
 
+Please note: The RSAL feature is deprecated.
+
 Steps to set up a DCM via Docker for Development
 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
 
diff --git a/doc/sphinx-guides/source/developers/classic-dev-env.rst b/doc/sphinx-guides/source/developers/classic-dev-env.rst
new file mode 100755
index 00000000000..062a1bb36f3
--- /dev/null
+++ b/doc/sphinx-guides/source/developers/classic-dev-env.rst
@@ -0,0 +1,266 @@
+=======================
+Classic Dev Environment
+=======================
+
+These are the old instructions we used for Dataverse 4 and 5. They should still work but these days we favor running Dataverse in Docker as described in :doc:`dev-environment`.
+
+These instructions are purposefully opinionated and terse to help you get your development environment up and running as quickly as possible! Please note that familiarity with running commands from the terminal is assumed.
+
+.. contents:: |toctitle|
+	:local:
+
+Quick Start (Docker)
+--------------------
+
+The quickest way to get Dataverse running is in Docker as explained in :doc:`../container/dev-usage` section of the Container Guide.
+
+
+Classic Dev Environment
+-----------------------
+
+Since before Docker existed, we have encouraged installing Dataverse and all its dependencies directly on your development machine, as described below. This can be thought of as the "classic" development environment for Dataverse.
+
+However, in 2023 we decided that we'd like to encourage all developers to start using Docker instead and opened https://github.com/IQSS/dataverse/issues/9616 to indicate that we plan to rewrite this page to recommend the use of Docker.
+
+There's nothing wrong with the classic instructions below and we don't plan to simply delete them. They are a valid alternative to running Dataverse in Docker. We will likely move them to another page.
+
+Set Up Dependencies
+-------------------
+
+Supported Operating Systems
+~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+Mac OS X or Linux is required because the setup scripts assume the presence of standard Unix utilities.
+
+Windows is gaining support through Docker as described in the :doc:`windows` section.
+
+Install Java
+~~~~~~~~~~~~
+
+The Dataverse Software requires Java 11.
+
+We suggest downloading OpenJDK from https://adoptopenjdk.net
+
+On Linux, you are welcome to use the OpenJDK available from package managers.
+
+Install Netbeans or Maven
+~~~~~~~~~~~~~~~~~~~~~~~~~
+
+NetBeans IDE is recommended, and can be downloaded from http://netbeans.org . Developers may use any editor or IDE. We recommend NetBeans because it is free, works cross platform, has good support for Jakarta EE projects, and includes a required build tool, Maven.
+
+Below we describe how to build the Dataverse Software war file with Netbeans but if you prefer to use only Maven, you can find installation instructions in the :doc:`tools` section.
+
+Install Homebrew (Mac Only)
+~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+On Mac, install Homebrew to simplify the steps below: https://brew.sh
+
+Clone the Dataverse Software Git Repo
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+Fork https://github.com/IQSS/dataverse and then clone your fork like this:
+
+``git clone git@github.com:[YOUR GITHUB USERNAME]/dataverse.git``
+
+Build the Dataverse Software War File
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+If you installed Netbeans, follow these steps:
+
+- Launch Netbeans and click "File" and then "Open Project". Navigate to where you put the Dataverse Software code and double-click "Dataverse" to open the project.
+- If you see "resolve project problems," go ahead and let Netbeans try to resolve them. This will probably include downloading dependencies, which can take a while.
+- Allow Netbeans to install nb-javac (required for Java 8 and below).
+- Select "Dataverse" under Projects and click "Run" in the menu and then "Build Project (Dataverse)". Check back for "BUILD SUCCESS" at the end.
+
+If you installed Maven instead of Netbeans, run ``mvn package``. Check for "BUILD SUCCESS" at the end.
+
+NOTE: Do you use a locale different than ``en_US.UTF-8`` on your development machine? Are you in a different timezone
+than Harvard (Eastern Time)? You might experience issues while running tests that were written with these settings
+in mind. The Maven  ``pom.xml`` tries to handle this for you by setting the locale to ``en_US.UTF-8`` and timezone
+``UTC``, but more, not yet discovered building or testing problems might lurk in the shadows.
+
+Install jq
+~~~~~~~~~~
+
+On Mac, run this command:
+
+``brew install jq``
+
+On Linux, install ``jq`` from your package manager or download a binary from http://stedolan.github.io/jq/
+
+Install Payara
+~~~~~~~~~~~~~~
+
+Payara 6.2023.8 or higher is required.
+
+To install Payara, run the following commands:
+
+``cd /usr/local``
+
+``sudo curl -O -L https://nexus.payara.fish/repository/payara-community/fish/payara/distributions/payara/6.2023.8/payara-6.2023.8.zip``
+
+``sudo unzip payara-6.2023.8.zip``
+
+``sudo chown -R $USER /usr/local/payara6``
+
+If nexus.payara.fish is ever down for maintenance, Payara distributions are also available from https://repo1.maven.org/maven2/fish/payara/distributions/payara/
+
+Install Service Dependencies Directly on localhost
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+Install PostgreSQL
+^^^^^^^^^^^^^^^^^^
+
+The Dataverse Software has been tested with PostgreSQL versions up to 13. PostgreSQL version 10+ is required.
+
+On Mac, go to https://www.postgresql.org/download/macosx/ and choose "Interactive installer by EDB" option. Note that version 13.5 is used in the command line examples below, but the process should be similar for other versions. When prompted to set a password for the "database superuser (postgres)" just enter "password".
+
+After installation is complete, make a backup of the ``pg_hba.conf`` file like this:
+
+``sudo cp /Library/PostgreSQL/13/data/pg_hba.conf /Library/PostgreSQL/13/data/pg_hba.conf.orig``
+
+Then edit ``pg_hba.conf`` with an editor such as vi:
+
+``sudo vi /Library/PostgreSQL/13/data/pg_hba.conf``
+
+In the "METHOD" column, change all instances of "scram-sha-256" (or whatever is in that column) to "trust". This will make it so PostgreSQL doesn't require a password.
+
+In the Finder, click "Applications" then "PostgreSQL 13" and launch the "Reload Configuration" app. Click "OK" after you see "server signaled".
+
+Next, to confirm the edit worked, launch the "pgAdmin" application from the same folder. Under "Browser", expand "Servers" and double click "PostgreSQL 13". When you are prompted for a password, leave it blank and click "OK". If you have successfully edited "pg_hba.conf", you can get in without a password.
+
+On Linux, you should just install PostgreSQL using your favorite package manager, such as ``yum``. (Consult the PostgreSQL section of :doc:`/installation/prerequisites` in the main Installation guide for more info and command line examples). Find ``pg_hba.conf`` and set the authentication method to "trust" and restart PostgreSQL.
+
+Install Solr
+^^^^^^^^^^^^
+
+`Solr <http://lucene.apache.org/solr/>`_ 9.3.0 is required.
+
+To install Solr, execute the following commands:
+
+``sudo mkdir /usr/local/solr``
+
+``sudo chown $USER /usr/local/solr``
+
+``cd /usr/local/solr``
+
+``curl -O http://archive.apache.org/dist/solr/solr/9.3.0/solr-9.3.0.tgz``
+
+``tar xvfz solr-9.3.0.tgz``
+
+``cd solr-9.3.0/server/solr``
+
+``cp -r configsets/_default collection1``
+
+``curl -O https://raw.githubusercontent.com/IQSS/dataverse/develop/conf/solr/9.3.0/schema.xml``
+
+``curl -O https://raw.githubusercontent.com/IQSS/dataverse/develop/conf/solr/9.3.0/schema_dv_mdb_fields.xml``
+
+``mv schema*.xml collection1/conf``
+
+``curl -O https://raw.githubusercontent.com/IQSS/dataverse/develop/conf/solr/9.3.0/solrconfig.xml``
+
+``mv solrconfig.xml collection1/conf/solrconfig.xml``
+
+``cd /usr/local/solr/solr-9.3.0``
+
+(Please note that the extra jetty argument below is a security measure to limit connections to Solr to only your computer. For extra security, run a firewall.)
+
+``bin/solr start -j "-Djetty.host=127.0.0.1"``
+
+``bin/solr create_core -c collection1 -d server/solr/collection1/conf``
+
+Install Service Dependencies Using Docker Compose
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+To avoid having to install service dependencies like PostgreSQL or Solr directly on your localhost, there is the alternative of using the ``docker-compose-dev.yml`` file available in the repository root. For this option you need to have Docker and Docker Compose installed on your machine.
+
+The ``docker-compose-dev.yml`` can be configured to only run the service dependencies necessary to support a Dataverse installation running directly on localhost. In addition to PostgreSQL and Solr, it also runs an SMTP server.
+
+Before running the Docker Compose file, you need to update the value of the ``DATAVERSE_DB_USER`` environment variable to ``postgres``. The variable can be found inside the ``.env`` file in the repository root. This step is required as the Dataverse installation script expects that database user.
+
+To run the Docker Compose file, go to the Dataverse repository root, then run:
+
+``docker-compose -f docker-compose-dev.yml up -d --scale dev_dataverse=0``
+
+Note that this command omits the Dataverse container defined in the Docker Compose file, since Dataverse is going to be installed directly on localhost in the next section.
+
+The command runs the containers in detached mode, but if you want to run them attached and thus view container logs in real time, remove the ``-d`` option from the command.
+
+Data volumes of each dependency will be persisted inside the ``docker-dev-volumes`` folder, inside the repository root.
+
+If you want to stop the containers, then run (for detached mode only, otherwise use ``Ctrl + C``):
+
+``docker-compose -f docker-compose-dev.yml stop``
+
+If you want to remove the containers, then run:
+
+``docker-compose -f docker-compose-dev.yml down``
+
+If you want to run a single container (the mail server, for example) then run:
+
+``docker-compose -f docker-compose-dev.yml up dev_smtp``
+
+For a fresh installation, and before running the Software Installer Script, it is recommended to delete the ``docker-dev-volumes`` folder to avoid installation problems due to existing data in the containers.
+
+Run the Dataverse Software Installer Script
+-------------------------------------------
+
+Navigate to the directory where you cloned the Dataverse Software git repo, then change directories to the ``scripts/installer`` directory like this:
+
+``cd scripts/installer``
+
+Create a Python virtual environment, activate it, then install dependencies:
+
+``python3 -m venv venv``
+
+``source venv/bin/activate``
+
+``pip install psycopg2-binary``
+
+The installer will try to connect to the SMTP server you tell it to use. If you haven't used the Docker Compose option for setting up the dependencies, or you don't have a mail server handy, you can run ``nc -l 25`` in another terminal and choose "localhost" (the default) to get past this check.
+
+Finally, run the installer (see also :download:`README_python.txt <../../../../scripts/installer/README_python.txt>` if necessary):
+
+``python3 install.py``
+
+Verify the Dataverse Software is Running
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+After the script has finished, you should be able to log into your Dataverse installation with the following credentials:
+
+- http://localhost:8080
+- username: dataverseAdmin
+- password: admin
+
+Configure Your Development Environment for Publishing
+-----------------------------------------------------
+
+Run the following command:
+
+``curl http://localhost:8080/api/admin/settings/:DoiProvider -X PUT -d FAKE``
+
+This will disable DOI registration by using a fake (in-code) DOI provider. Please note that this feature is only available in Dataverse Software 4.10+ and that at present, the UI will give no indication that the DOIs thus minted are fake.
+
+Developers may also wish to consider using :ref:`PermaLinks <permalinks>`
+
+Configure Your Development Environment for GUI Edits
+----------------------------------------------------
+
+Out of the box, a JSF setting is configured for production use and prevents edits to the GUI (xhtml files) from being visible unless you do a full deployment.
+
+It is recommended that you run the following command so that simply saving the xhtml file in Netbeans is enough for the change to show up.
+
+``asadmin create-system-properties "dataverse.jsf.refresh-period=1"``
+
+For more on JSF settings like this, see :ref:`jsf-config`.
+
+Next Steps
+----------
+
+If you can log in to the Dataverse installation, great! If not, please see the :doc:`troubleshooting` section. For further assistance, please see "Getting Help" in the :doc:`intro` section.
+
+You're almost ready to start hacking on code. Now that the installer script has you up and running, you need to continue on to the :doc:`tips` section to get set up to deploy code from your IDE or the command line.
+
+----
+
+Previous: :doc:`intro` | Next: :doc:`tips`
diff --git a/doc/sphinx-guides/source/developers/containers.rst b/doc/sphinx-guides/source/developers/containers.rst
index b42f7f5a2e2..175b178b455 100755
--- a/doc/sphinx-guides/source/developers/containers.rst
+++ b/doc/sphinx-guides/source/developers/containers.rst
@@ -25,11 +25,6 @@ The primary community-lead projects (which the core team is drawing inspiration
 - https://github.com/IQSS/dataverse-docker
 - https://github.com/IQSS/dataverse-kubernetes (especially the https://github.com/EOSC-synergy/dataverse-kubernetes fork)
 
-Deprecated Projects
--------------------
-
-The :doc:`testing` section mentions using docker-aio for integration tests. We do not plan to keep this project alive.
-
 Using Containers for Reproducible Research
 ------------------------------------------
 
diff --git a/doc/sphinx-guides/source/developers/debugging.rst b/doc/sphinx-guides/source/developers/debugging.rst
index 2088afe5521..50e8901b1ff 100644
--- a/doc/sphinx-guides/source/developers/debugging.rst
+++ b/doc/sphinx-guides/source/developers/debugging.rst
@@ -20,8 +20,8 @@ during development without recompiling. Changing the options will require at lea
 how you get these options in. (Variable substitution only happens during deployment and when using system properties
 or environment variables, you'll need to pass these into the domain, which usually will require an app server restart.)
 
-Please note that since Payara 5.2021.1 supporting MicroProfile Config 2.0, you can
-`use profiles <https://download.eclipse.org/microprofile/microprofile-config-2.0/microprofile-config-spec-2.0.html#configprofile>`_
+Please note you can use
+`MicroProfile Config <https://download.eclipse.org/microprofile/microprofile-config-2.0/microprofile-config-spec-2.0.html#configprofile>`_
 to maintain your settings more easily for different environments.
 
 .. list-table::
diff --git a/doc/sphinx-guides/source/developers/dependencies.rst b/doc/sphinx-guides/source/developers/dependencies.rst
index 65edfa3ffac..0208c49f90a 100644
--- a/doc/sphinx-guides/source/developers/dependencies.rst
+++ b/doc/sphinx-guides/source/developers/dependencies.rst
@@ -344,8 +344,7 @@ Repositories
 ------------
 
 Maven receives all dependencies from *repositories*. These can be public like `Maven Central <https://search.maven.org/>`_
-and others, but you can also use a private repository on premises or in the cloud. Last but not least, you can use
-local repositories, which can live next to your application code (see ``local_lib`` dir within the Dataverse Software codebase).
+and others, but you can also use a private repository on premises or in the cloud.
 
 Repositories are defined within the Dataverse Software POM like this:
 
@@ -364,11 +363,6 @@ Repositories are defined within the Dataverse Software POM like this:
             <url>http://repository.primefaces.org</url>
             <layout>default</layout>
         </repository>
-        <repository>
-            <id>dvn.private</id>
-            <name>Local repository for hosting jars not available from network repositories.</name>
-            <url>file://${project.basedir}/local_lib</url>
-        </repository>
     </repositories>
 
 You can also add repositories to your local Maven settings, see `docs <https://maven.apache.org/ref/3.6.0/maven-settings/settings.html>`_.
diff --git a/doc/sphinx-guides/source/developers/dev-environment.rst b/doc/sphinx-guides/source/developers/dev-environment.rst
index b3f7fb1c1af..1301994cc82 100755
--- a/doc/sphinx-guides/source/developers/dev-environment.rst
+++ b/doc/sphinx-guides/source/developers/dev-environment.rst
@@ -2,263 +2,81 @@
 Development Environment
 =======================
 
-These instructions are purposefully opinionated and terse to help you get your development environment up and running as quickly as possible! Please note that familiarity with running commands from the terminal is assumed.
+These instructions are oriented around Docker but the "classic" instructions we used for Dataverse 4 and 5 are still available at :doc:`classic-dev-env`.
 
 .. contents:: |toctitle|
 	:local:
 
-Quick Start (Docker)
---------------------
+.. _container-dev-quickstart:
 
-The quickest way to get Dataverse running is in Docker as explained in :doc:`../container/dev-usage` section of the Container Guide.
-
-
-Classic Dev Environment
------------------------
-
-Since before Docker existed, we have encouraged installing Dataverse and all its dependencies directly on your development machine, as described below. This can be thought of as the "classic" development environment for Dataverse.
+Quickstart
+----------
 
-However, in 2023 we decided that we'd like to encourage all developers to start using Docker instead and opened https://github.com/IQSS/dataverse/issues/9616 to indicate that we plan to rewrite this page to recommend the use of Docker.
+First, install Java 17, Maven, and Docker.
 
-There's nothing wrong with the classic instructions below and we don't plan to simply delete them. They are a valid alternative to running Dataverse in Docker. We will likely move them to another page.
+After cloning the `dataverse repo <https://github.com/IQSS/dataverse>`_, run this:
 
-Set Up Dependencies
--------------------
+``mvn -Pct clean package docker:run``
 
-Supported Operating Systems
-~~~~~~~~~~~~~~~~~~~~~~~~~~~
+After some time you should be able to log in:
 
-Mac OS X or Linux is required because the setup scripts assume the presence of standard Unix utilities.
+- url: http://localhost:8080
+- username: dataverseAdmin
+- password: admin1
 
-Windows is gaining support through Docker as described in the :doc:`windows` section.
+Detailed Steps
+--------------
 
 Install Java
 ~~~~~~~~~~~~
 
-The Dataverse Software requires Java 11.
+The Dataverse Software requires Java 17.
 
-We suggest downloading OpenJDK from https://adoptopenjdk.net
+On Mac and Windows, we suggest downloading OpenJDK from https://adoptium.net (formerly `AdoptOpenJDK <https://adoptopenjdk.net>`_) or `SDKMAN <https://sdkman.io>`_.
 
 On Linux, you are welcome to use the OpenJDK available from package managers.
 
-Install Netbeans or Maven
-~~~~~~~~~~~~~~~~~~~~~~~~~
+Install Maven
+~~~~~~~~~~~~~
 
-NetBeans IDE is recommended, and can be downloaded from http://netbeans.org . Developers may use any editor or IDE. We recommend NetBeans because it is free, works cross platform, has good support for Jakarta EE projects, and includes a required build tool, Maven.
+Follow instructions at https://maven.apache.org
 
-Below we describe how to build the Dataverse Software war file with Netbeans but if you prefer to use only Maven, you can find installation instructions in the :doc:`tools` section.
+Install and Start Docker
+~~~~~~~~~~~~~~~~~~~~~~~~
 
-Install Homebrew (Mac Only)
-~~~~~~~~~~~~~~~~~~~~~~~~~~~
+Follow instructions at https://www.docker.com
 
-On Mac, install Homebrew to simplify the steps below: https://brew.sh
+Be sure to start Docker.
 
-Clone the Dataverse Software Git Repo
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+Git Clone Repo
+~~~~~~~~~~~~~~
 
 Fork https://github.com/IQSS/dataverse and then clone your fork like this:
 
 ``git clone git@github.com:[YOUR GITHUB USERNAME]/dataverse.git``
 
-Build the Dataverse Software War File
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-If you installed Netbeans, follow these steps:
-
-- Launch Netbeans and click "File" and then "Open Project". Navigate to where you put the Dataverse Software code and double-click "Dataverse" to open the project.
-- If you see "resolve project problems," go ahead and let Netbeans try to resolve them. This will probably including downloading dependencies, which can take a while.
-- Allow Netbeans to install nb-javac (required for Java 8 and below).
-- Select "Dataverse" under Projects and click "Run" in the menu and then "Build Project (Dataverse)". Check back for "BUILD SUCCESS" at the end.
-
-If you installed Maven instead of Netbeans, run ``mvn package``. Check for "BUILD SUCCESS" at the end.
-
-NOTE: Do you use a locale different than ``en_US.UTF-8`` on your development machine? Are you in a different timezone
-than Harvard (Eastern Time)? You might experience issues while running tests that were written with these settings
-in mind. The Maven  ``pom.xml`` tries to handle this for you by setting the locale to ``en_US.UTF-8`` and timezone
-``UTC``, but more, not yet discovered building or testing problems might lurk in the shadows.
-
-Install jq
-~~~~~~~~~~
-
-On Mac, run this command:
-
-``brew install jq``
-
-On Linux, install ``jq`` from your package manager or download a binary from http://stedolan.github.io/jq/
-
-Install Payara
-~~~~~~~~~~~~~~
-
-Payara 5.2022.3 or higher is required.
-
-To install Payara, run the following commands:
-
-``cd /usr/local``
-
-``sudo curl -O -L https://nexus.payara.fish/repository/payara-community/fish/payara/distributions/payara/5.2022.3/payara-5.2022.3.zip``
-
-``sudo unzip payara-5.2022.3.zip``
-
-``sudo chown -R $USER /usr/local/payara5``
-
-If nexus.payara.fish is ever down for maintenance, Payara distributions are also available from https://repo1.maven.org/maven2/fish/payara/distributions/payara/
-
-Install Service Dependencies Directly on localhost
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-Install PostgreSQL
-^^^^^^^^^^^^^^^^^^
-
-The Dataverse Software has been tested with PostgreSQL versions up to 13. PostgreSQL version 10+ is required.
-
-On Mac, go to https://www.postgresql.org/download/macosx/ and choose "Interactive installer by EDB" option. Note that version 13.5 is used in the command line examples below, but the process should be similar for other versions. When prompted to set a password for the "database superuser (postgres)" just enter "password".
-
-After installation is complete, make a backup of the ``pg_hba.conf`` file like this:
-
-``sudo cp /Library/PostgreSQL/13/data/pg_hba.conf /Library/PostgreSQL/13/data/pg_hba.conf.orig``
-
-Then edit ``pg_hba.conf`` with an editor such as vi:
-
-``sudo vi /Library/PostgreSQL/13/data/pg_hba.conf``
-
-In the "METHOD" column, change all instances of "scram-sha-256" (or whatever is in that column) to "trust". This will make it so PostgreSQL doesn't require a password.
-
-In the Finder, click "Applications" then "PostgreSQL 13" and launch the "Reload Configuration" app. Click "OK" after you see "server signaled".
-
-Next, to confirm the edit worked, launch the "pgAdmin" application from the same folder. Under "Browser", expand "Servers" and double click "PostgreSQL 13". When you are prompted for a password, leave it blank and click "OK". If you have successfully edited "pg_hba.conf", you can get in without a password.
-
-On Linux, you should just install PostgreSQL using your favorite package manager, such as ``yum``. (Consult the PostgreSQL section of :doc:`/installation/prerequisites` in the main Installation guide for more info and command line examples). Find ``pg_hba.conf`` and set the authentication method to "trust" and restart PostgreSQL.
-
-Install Solr
-^^^^^^^^^^^^
-
-`Solr <http://lucene.apache.org/solr/>`_ 8.11.1 is required.
-
-To install Solr, execute the following commands:
-
-``sudo mkdir /usr/local/solr``
-
-``sudo chown $USER /usr/local/solr``
-
-``cd /usr/local/solr``
-
-``curl -O http://archive.apache.org/dist/lucene/solr/8.11.1/solr-8.11.1.tgz``
-
-``tar xvfz solr-8.11.1.tgz``
-
-``cd solr-8.11.1/server/solr``
-
-``cp -r configsets/_default collection1``
+Build and Run
+~~~~~~~~~~~~~
 
-``curl -O https://raw.githubusercontent.com/IQSS/dataverse/develop/conf/solr/8.11.1/schema.xml``
+Change into the ``dataverse`` directory you just cloned and run the following command:
 
-``curl -O https://raw.githubusercontent.com/IQSS/dataverse/develop/conf/solr/8.11.1/schema_dv_mdb_fields.xml``
+``mvn -Pct clean package docker:run``
 
-``mv schema*.xml collection1/conf``
+Verify
+~~~~~~
 
-``curl -O https://raw.githubusercontent.com/IQSS/dataverse/develop/conf/solr/8.11.1/solrconfig.xml``
+After some time you should be able to log in:
 
-``mv solrconfig.xml collection1/conf/solrconfig.xml``
-
-``cd /usr/local/solr/solr-8.11.1``
-
-(Please note that the extra jetty argument below is a security measure to limit connections to Solr to only your computer. For extra security, run a firewall.)
-
-``bin/solr start -j "-Djetty.host=127.0.0.1"``
-
-``bin/solr create_core -c collection1 -d server/solr/collection1/conf``
-
-Install Service Dependencies Using Docker Compose
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-To avoid having to install service dependencies like PostgreSQL or Solr directly on your localhost, there is the alternative of using the ``docker-compose-dev.yml`` file available in the repository root. For this option you need to have Docker and Docker Compose installed on your machine.
-
-The ``docker-compose-dev.yml`` can be configured to only run the service dependencies necessary to support a Dataverse installation running directly on localhost. In addition to PostgreSQL and Solr, it also runs a SMTP server.
-
-Before running the Docker Compose file, you need to update the value of the ``DATAVERSE_DB_USER`` environment variable to ``postgres``. The variable can be found inside the ``.env`` file in the repository root. This step is required as the Dataverse installation script expects that database user.
-
-To run the Docker Compose file, go to the Dataverse repository root, then run:
-
-``docker-compose -f docker-compose-dev.yml up -d --scale dev_dataverse=0``
-
-Note that this command omits the Dataverse container defined in the Docker Compose file, since Dataverse is going to be installed directly on localhost in the next section.
-
-The command runs the containers in detached mode, but if you want to run them attached and thus view container logs in real time, remove the ``-d`` option from the command.
-
-Data volumes of each dependency will be persisted inside the ``docker-dev-volumes`` folder, inside the repository root.
-
-If you want to stop the containers, then run (for detached mode only, otherwise use ``Ctrl + C``):
-
-``docker-compose -f docker-compose-dev.yml stop``
-
-If you want to remove the containers, then run:
-
-``docker-compose -f docker-compose-dev.yml down``
-
-If you want to run a single container (the mail server, for example) then run:
-
-``docker-compose -f docker-compose-dev.yml up dev_smtp``
-
-For a fresh installation, and before running the Software Installer Script, it is recommended to delete the docker-dev-env folder to avoid installation problems due to existing data in the containers.
-
-Run the Dataverse Software Installer Script
--------------------------------------------
-
-Navigate to the directory where you cloned the Dataverse Software git repo change directories to the ``scripts/installer`` directory like this:
-
-``cd scripts/installer``
-
-Create a Python virtual environment, activate it, then install dependencies:
-
-``python3 -m venv venv``
-
-``source venv/bin/activate``
-
-``pip install psycopg2-binary``
-
-The installer will try to connect to the SMTP server you tell it to use. If you haven't used the Docker Compose option for setting up the dependencies, or you don't have a mail server handy, you can run ``nc -l 25`` in another terminal and choose "localhost" (the default) to get past this check.
-
-Finally, run the installer (see also :download:`README_python.txt <../../../../scripts/installer/README_python.txt>` if necessary):
-
-``python3 install.py``
-
-Verify the Dataverse Software is Running
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-After the script has finished, you should be able to log into your Dataverse installation with the following credentials:
-
-- http://localhost:8080
+- url: http://localhost:8080
 - username: dataverseAdmin
-- password: admin
-
-Configure Your Development Environment for Publishing
------------------------------------------------------
-
-Run the following command:
-
-``curl http://localhost:8080/api/admin/settings/:DoiProvider -X PUT -d FAKE``
-
-This will disable DOI registration by using a fake (in-code) DOI provider. Please note that this feature is only available in Dataverse Software 4.10+ and that at present, the UI will give no indication that the DOIs thus minted are fake.
-
-Developers may also wish to consider using :ref:`PermaLinks <permalinks>`
-
-Configure Your Development Environment for GUI Edits
-----------------------------------------------------
-
-Out of the box, a JSF setting is configured for production use and prevents edits to the GUI (xhtml files) from being visible unless you do a full deployment.
-
-It is recommended that you run the following command so that simply saving the xhtml file in Netbeans is enough for the change to show up.
-
-``asadmin create-system-properties "dataverse.jsf.refresh-period=1"``
-
-For more on JSF settings like this, see :ref:`jsf-config`.
-
-Next Steps
-----------
+- password: admin1
 
-If you can log in to the Dataverse installation, great! If not, please see the :doc:`troubleshooting` section. For further assistance, please see "Getting Help" in the :doc:`intro` section.
+More Information
+----------------
 
-You're almost ready to start hacking on code. Now that the installer script has you up and running, you need to continue on to the :doc:`tips` section to get set up to deploy code from your IDE or the command line.
+See also the :doc:`/container/dev-usage` section of the Container Guide.
 
-----
+Getting Help
+------------
 
-Previous: :doc:`intro` | Next: :doc:`tips`
+Please feel free to reach out at https://chat.dataverse.org or https://groups.google.com/g/dataverse-dev if you have any difficulty setting up a dev environment!
diff --git a/doc/sphinx-guides/source/developers/index.rst b/doc/sphinx-guides/source/developers/index.rst
index d70b682fcda..3ac9e955ea2 100755
--- a/doc/sphinx-guides/source/developers/index.rst
+++ b/doc/sphinx-guides/source/developers/index.rst
@@ -27,6 +27,7 @@ Developer Guide
    deployment
    containers
    making-releases
+   making-library-releases
    metadataexport
    tools
    unf/index
@@ -41,4 +42,5 @@ Developer Guide
    dataset-migration-api 
    workflows
    fontcustom
+   classic-dev-env
    
diff --git a/doc/sphinx-guides/source/developers/intro.rst b/doc/sphinx-guides/source/developers/intro.rst
index 7f4e8c1ba34..4a64c407fc1 100755
--- a/doc/sphinx-guides/source/developers/intro.rst
+++ b/doc/sphinx-guides/source/developers/intro.rst
@@ -52,7 +52,9 @@ Related Guides
 
 If you are a developer who wants to make use of the Dataverse Software APIs, please see the :doc:`/api/index`. If you have front-end UI questions, please see the :doc:`/style/index`.
 
-If you are a sysadmin who likes to code, you may be interested in hacking on installation scripts mentioned in the :doc:`/installation/index`. We validate the installation scripts with :doc:`/developers/tools` such as `Vagrant <http://vagrantup.com>`_ and Docker (see the :doc:`containers` section).
+If you are a sysadmin who likes to code, you may be interested in hacking on installation scripts mentioned in the :doc:`/installation/index`.
+
+If you are a Docker enthusiast, please check out the :doc:`/container/index`.
 
 Related Projects
 ----------------
diff --git a/doc/sphinx-guides/source/developers/make-data-count.rst b/doc/sphinx-guides/source/developers/make-data-count.rst
index a3c0d10dc5e..8eaa5c0d7f8 100644
--- a/doc/sphinx-guides/source/developers/make-data-count.rst
+++ b/doc/sphinx-guides/source/developers/make-data-count.rst
@@ -30,15 +30,13 @@ Full Setup
 
 The recommended way to work on the Make Data Count feature is to spin up an EC2 instance that has both the Dataverse Software and Counter Processor installed. Go to the :doc:`deployment` page for details on how to spin up an EC2 instance and make sure that your Ansible file is configured to install Counter Processor before running the "create" script.
 
-(Alternatively, you can try installing Counter Processor in Vagrant. :download:`setup-counter-processor.sh <../../../../scripts/vagrant/setup-counter-processor.sh>` might help you get it installed.)
-
 After you have spun to your EC2 instance, set ``:MDCLogPath`` so that the Dataverse installation creates a log for Counter Processor to operate on. For more on this database setting, see the :doc:`/installation/config` section of the Installation Guide.
 
 Next you need to have the Dataverse installation add some entries to the log that Counter Processor will operate on. To do this, click on some published datasets and download some files.
 
-Next you should run Counter Processor to convert the log into a SUSHI report, which is in JSON format. Before running Counter Processor, you need to put a configuration file into place. As a starting point use :download:`counter-processor-config.yaml <../../../../scripts/vagrant/counter-processor-config.yaml>` and edit the file, paying particular attention to the following settings:
+Next you should run Counter Processor to convert the log into a SUSHI report, which is in JSON format. Before running Counter Processor, you need to put a configuration file into place. As a starting point use :download:`counter-processor-config.yaml <../_static/developers/counter-processor-config.yaml>` and edit the file, paying particular attention to the following settings:
 
-- ``log_name_pattern`` You might want something like ``/usr/local/payara5/glassfish/domains/domain1/logs/counter_(yyyy-mm-dd).log``
+- ``log_name_pattern`` You might want something like ``/usr/local/payara6/glassfish/domains/domain1/logs/counter_(yyyy-mm-dd).log``
 - ``year_month`` You should probably set this to the current month.
 - ``output_file`` This needs to be a directory that the "dataverse" Unix user can read but that the "counter" user can write to. In dev, you can probably get away with "/tmp" as the directory.
 - ``platform`` Out of the box from Counter Processor this is set to ``Dash`` but this should be changed to match the name of your Dataverse installation. Examples are "Harvard Dataverse Repository" for Harvard University or "LibraData" for the University of Virginia.
diff --git a/doc/sphinx-guides/source/developers/making-library-releases.rst b/doc/sphinx-guides/source/developers/making-library-releases.rst
new file mode 100755
index 00000000000..63b6eeb1c2a
--- /dev/null
+++ b/doc/sphinx-guides/source/developers/making-library-releases.rst
@@ -0,0 +1,93 @@
+=======================
+Making Library Releases
+=======================
+
+.. contents:: |toctitle|
+	:local:
+
+Introduction
+------------
+
+Note: See :doc:`making-releases` for Dataverse itself.
+
+We release Java libraries to Maven Central that are used by Dataverse (and perhaps `other <https://github.com/gdcc/xoai/issues/141>`_ `software <https://github.com/gdcc/xoai/issues/170>`_!):
+
+- https://central.sonatype.com/namespace/org.dataverse
+- https://central.sonatype.com/namespace/io.gdcc
+
+We release JavaScript/TypeScript libraries to npm:
+
+- https://www.npmjs.com/package/@iqss/dataverse-design-system
+
+Maven Central (Java)
+--------------------
+
+From the perspective of Maven Central, we are both `producers <https://central.sonatype.org/publish/>`_ because we publish/release libraries there and `consumers <https://central.sonatype.org/consume/>`_ because we pull down those libraries (and many others) when we build Dataverse.
+
+Releasing Existing Libraries to Maven Central
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+If you need to release an existing library, all the setup should be done already. The steps below assume that GitHub Actions are in place to do the heavy lifting for you, such as signing artifacts with GPG.
+
+Releasing a Snapshot Version to Maven Central
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+`Snapshot <https://maven.apache.org/guides/getting-started/index.html#what-is-a-snapshot-version>`_ releases are published automatically through GitHub Actions (e.g. through a `snapshot workflow <https://github.com/gdcc/sword2-server/blob/main/.github/workflows/maven-snapshot.yml>`_ for the SWORD library) every time a pull request is merged (or the default branch, typically ``main``, is otherwise updated).
+
+That is to say, to make a snapshot release, you only need to get one or more commits into the default branch.
+
+Releasing a Release (Non-Snapshot) Version to Maven Central
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+From a pom.xml it may not be apparent that snapshots like ``6.0-SNAPSHOT`` might be changing under your feet. Browsing the snapshot repository (e.g. our `UNF 6.0-SNAPSHOT <https://s01.oss.sonatype.org/content/groups/staging/org/dataverse/unf/6.0-SNAPSHOT/>`_), may reveal versions changing over time. To finalize the code and stop it from changing, we publish/release what Maven calls a "`release version <https://maven.apache.org/guides/getting-started/index.html#what-is-a-snapshot-version>`_". This will remove ``-SNAPSHOT`` from the version (through an ``mvn`` command).
+
+Non-snapshot releases (`release <https://maven.apache.org/guides/getting-started/index.html#what-is-a-snapshot-version>`_ versions) are published automatically through GitHub Actions (e.g. through a `release workflow <https://github.com/gdcc/sword2-server/blob/main/.github/workflows/maven-release.yml>`_), kicked off locally by an ``mvn`` command that invokes the `Maven Release Plugin <https://maven.apache.org/maven-release/maven-release-plugin/>`_.
+
+First, run a clean:
+
+``mvn release:clean``
+
+Then run a prepare:
+
+``mvn release:prepare``
+
+The prepare step is interactive. You will be prompted for the following information:
+
+- the release version (e.g. `2.0.0 <https://repo.maven.apache.org/maven2/io/gdcc/sword2-server/2.0.0/>`_)
+- the git tag to create and push (e.g. `sword2-server-2.0.0 <https://github.com/gdcc/sword2-server/releases/tag/sword2-server-2.0.0>`_)
+- the next development (snapshot) version (e.g. `2.0.1-SNAPSHOT <https://s01.oss.sonatype.org/#nexus-search;checksum~47575aed5471adeb0a08a02098ce3a23a5778afb>`_)
+
+These examples are from the SWORD library. Below is what to expect from the interactive session. In many cases, you can just hit enter to accept the defaults.
+
+.. code-block:: bash
+
+        [INFO] 5/17 prepare:map-release-versions
+        What is the release version for "SWORD v2 Common Server Library (forked)"? (sword2-server) 2.0.0: :
+        [INFO] 6/17 prepare:input-variables
+        What is the SCM release tag or label for "SWORD v2 Common Server Library (forked)"? (sword2-server) sword2-server-2.0.0: :
+        [INFO] 7/17 prepare:map-development-versions
+        What is the new development version for "SWORD v2 Common Server Library (forked)"? (sword2-server) 2.0.1-SNAPSHOT: :
+        [INFO] 8/17 prepare:rewrite-poms-for-release
+
+It can take some time for the jar to be visible on Maven Central. You can start by looking on the repo1 server, like this: https://repo1.maven.org/maven2/io/gdcc/sword2-server/2.0.0/
+
+Don't bother putting the new version in a pom.xml until you see it on repo1.
+
+Note that the next snapshot release should be available as well, like this: https://s01.oss.sonatype.org/content/groups/staging/io/gdcc/sword2-server/2.0.1-SNAPSHOT/ 
+
+Releasing a New Library to Maven Central
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+At a high level:
+
+- Use an existing pom.xml as a starting point.
+- Use existing GitHub Actions workflows as a starting point.
+- Create secrets in the new library's GitHub repo used by the workflow.
+- If you need an entire new namespace, look at previous issues such as https://issues.sonatype.org/browse/OSSRH-94575 and https://issues.sonatype.org/browse/OSSRH-94577
+
+npm (JavaScript/TypeScript)
+---------------------------
+
+Currently, publishing `@iqss/dataverse-design-system <https://www.npmjs.com/package/@iqss/dataverse-design-system>`_ to npm is done manually. We plan to automate this as part of https://github.com/IQSS/dataverse-frontend/issues/140
+
+https://www.npmjs.com/package/js-dataverse is the previous 1.0 version of js-dataverse. No 1.x releases are planned. We plan to publish 2.0 (used by the new frontend) as discussed in https://github.com/IQSS/dataverse-frontend/issues/13
\ No newline at end of file
diff --git a/doc/sphinx-guides/source/developers/making-releases.rst b/doc/sphinx-guides/source/developers/making-releases.rst
index a2575bb5f50..23c4773a06e 100755
--- a/doc/sphinx-guides/source/developers/making-releases.rst
+++ b/doc/sphinx-guides/source/developers/making-releases.rst
@@ -8,6 +8,8 @@ Making Releases
 Introduction
 ------------
 
+Note: See :doc:`making-library-releases` for how to publish our libraries to Maven Central. 
+
 See :doc:`version-control` for background on our branching strategy.
 
 The steps below describe making both regular releases and hotfix releases.
diff --git a/doc/sphinx-guides/source/developers/testing.rst b/doc/sphinx-guides/source/developers/testing.rst
index c228d8e20ca..acaeccf4f23 100755
--- a/doc/sphinx-guides/source/developers/testing.rst
+++ b/doc/sphinx-guides/source/developers/testing.rst
@@ -47,12 +47,14 @@ Writing Unit Tests with JUnit
 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
 
 We are aware that there are newer testing tools such as TestNG, but we use `JUnit <http://junit.org>`_ because it's tried and true.
-We support both (legacy) JUnit 4.x tests (forming the majority of our tests) and
-newer JUnit 5 based testing.
+We support JUnit 5 based testing and require new tests written with it.
+(Since Dataverse 6.0, we migrated all of our tests formerly based on JUnit 4.)
 
-NOTE: When adding new tests, you should give JUnit 5 a go instead of adding more dependencies to JUnit 4.x.
-
-If writing tests is new to you, poke around existing unit tests which all end in ``Test.java`` and live under ``src/test``. Each test is annotated with ``@Test`` and should have at least one assertion which specifies the expected result. In Netbeans, you can run all the tests in it by clicking "Run" -> "Test File". From the test file, you should be able to navigate to the code that's being tested by right-clicking on the file and clicking "Navigate" -> "Go to Test/Tested class". Likewise, from the code, you should be able to use the same "Navigate" menu to go to the tests.
+If writing tests is new to you, poke around existing unit tests which all end in ``Test.java`` and live under ``src/test``.
+Each test is annotated with ``@Test`` and should have at least one assertion which specifies the expected result.
+In Netbeans, you can run all the tests in it by clicking "Run" -> "Test File".
+From the test file, you should be able to navigate to the code that's being tested by right-clicking on the file and clicking "Navigate" -> "Go to Test/Tested class".
+Likewise, from the code, you should be able to use the same "Navigate" menu to go to the tests.
 
 NOTE: Please remember when writing tests checking possibly localized outputs to check against ``en_US.UTF-8`` and ``UTC``
 l10n strings!
@@ -62,22 +64,24 @@ Refactoring Code to Make It Unit-Testable
 
 Existing code is not necessarily written in a way that lends itself to easy testing. Generally speaking, it is difficult to write unit tests for both JSF "backing" beans (which end in ``Page.java``) and "service" beans (which end in ``Service.java``) because they require the database to be running in order to test them. If service beans can be exercised via API they can be tested with integration tests (described below) but a good technique for making the logic testable it to move code to "util beans" (which end in ``Util.java``) that operate on Plain Old Java Objects (POJOs). ``PrivateUrlUtil.java`` is a good example of moving logic from ``PrivateUrlServiceBean.java`` to a "util" bean to make the code testable.
 
-Parameterized Tests and JUnit Theories
-^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+Parameterized Tests
+^^^^^^^^^^^^^^^^^^^
+
 Often times you will want to test a method multiple times with similar values.
 In order to avoid test bloat (writing a test for every data combination),
 JUnit offers Data-driven unit tests. This allows a test to be run for each set
 of defined data values.
 
-JUnit 4 uses ``Parameterized.class`` and ``Theories.class``. For reference, take a look at issue https://github.com/IQSS/dataverse/issues/5619.
-
-JUnit 5 doesn't offer theories (see `jqwik <https://jqwik.net>`_ for this), but
-greatly extended parameterized testing. Some guidance how to write those:
+JUnit 5 offers great parameterized testing. Some guidance on how to write those:
 
 - https://junit.org/junit5/docs/current/user-guide/#writing-tests-parameterized-tests
 - https://www.baeldung.com/parameterized-tests-junit-5
 - https://blog.codefx.org/libraries/junit-5-parameterized-tests/
-- See also some examples in our codebase.
+- See also many examples in our codebase.
+
+Note that JUnit 5 also offers support for custom test parameter resolvers. This enables keeping tests cleaner,
+as preparation might happen within some extension and the test code is more focused on the actual testing.
+See https://junit.org/junit5/docs/current/user-guide/#extensions-parameter-resolution for more information.
 
 JUnit 5 Test Helper Extensions
 ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
@@ -116,11 +120,14 @@ In addition, there is a writeup on "The Testable Command" at https://github.com/
 Running Non-Essential (Excluded) Unit Tests
 ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
 
-You should be aware that some unit tests have been deemed "non-essential" and have been annotated with ``@Category(NonEssentialTests.class)`` and are excluded from the "dev" Maven profile, which is the default profile. All unit tests (that have not been annotated with ``@Ignore``), including these non-essential tests, are run from continuous integration systems such as Jenkins and GitHub Actions with the following ``mvn`` command that invokes a non-default profile:
+You should be aware that some unit tests have been deemed "non-essential" and have been annotated with ``@Tag(Tags.NOT_ESSENTIAL_UNITTESTS)`` and are excluded from the "dev" Maven profile, which is the default profile.
+All unit tests (that have not been annotated with ``@Disabled``), including these non-essential tests, are run from continuous integration systems such as Jenkins and GitHub Actions with the following ``mvn`` command that invokes a non-default profile:
 
 ``mvn test -P all-unit-tests``
 
-Generally speaking, unit tests have been flagged as non-essential because they are slow or because they require an Internet connection. You should not feel obligated to run these tests continuously but you can use the ``mvn`` command above to run them. To iterate on the unit test in Netbeans and execute it with "Run -> Test File", you must temporarily comment out the annotation flagging the test as non-essential.
+Generally speaking, unit tests have been flagged as non-essential because they are slow or because they require an Internet connection.
+You should not feel obligated to run these tests continuously but you can use the ``mvn`` command above to run them.
+To iterate on the unit test in Netbeans and execute it with "Run -> Test File", you must temporarily comment out the annotation flagging the test as non-essential.
 
 Integration Tests
 -----------------
@@ -173,12 +180,7 @@ Finally, run the script:
 Running the full API test suite using Docker
 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
 
-.. note::
-    Sunsetting of this module is imminent.** There is no schedule yet, but expect it to go away.
-    Please let the `Dataverse Containerization Working Group <https://ct.gdcc.io>`_ know if you are a user and
-    what should be preserved.
-
-To run the full suite of integration tests on your laptop, we recommend using the "all in one" Docker configuration described in ``conf/docker-aio/readme.md`` in the root of the repo.
+To run the full suite of integration tests on your laptop, we recommend running Dataverse and its dependencies in Docker, as explained in the :doc:`/container/dev-usage` section of the Container Guide.
 
 Alternatively, you can run tests against the app server running on your laptop by following the "getting set up" steps below.
 
@@ -308,9 +310,9 @@ To run these tests, simply call out to Maven:
 Measuring Coverage of Integration Tests
 ---------------------------------------
 
-Measuring the code coverage of integration tests with Jacoco requires several steps. In order to make these steps clear we'll use "/usr/local/payara5" as the Payara directory and "dataverse" as the Payara Unix user.
+Measuring the code coverage of integration tests with Jacoco requires several steps. In order to make these steps clear we'll use "/usr/local/payara6" as the Payara directory and "dataverse" as the Payara Unix user.
 
-Please note that this was tested under Glassfish 4 but it is hoped that the same steps will work with Payara 5.
+Please note that this was tested under Glassfish 4 but it is hoped that the same steps will work with Payara.
 
 Add jacocoagent.jar to Payara
 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
@@ -329,9 +331,9 @@ Note that we are running the following commands as the user "dataverse". In shor
   cd local/jacoco-0.8.1
   wget https://github.com/jacoco/jacoco/releases/download/v0.8.1/jacoco-0.8.1.zip
   unzip jacoco-0.8.1.zip
-  /usr/local/payara5/bin/asadmin stop-domain
-  cp /home/dataverse/local/jacoco-0.8.1/lib/jacocoagent.jar /usr/local/payara5/glassfish/lib
-  /usr/local/payara5/bin/asadmin start-domain
+  /usr/local/payara6/bin/asadmin stop-domain
+  cp /home/dataverse/local/jacoco-0.8.1/lib/jacocoagent.jar /usr/local/payara6/glassfish/lib
+  /usr/local/payara6/bin/asadmin start-domain
 
 Add jacococli.jar to the WAR File
 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
@@ -354,21 +356,21 @@ Run this as the "dataverse" user.
 
 .. code-block:: bash
 
-  /usr/local/payara5/bin/asadmin deploy dataverse-jacoco.war
+  /usr/local/payara6/bin/asadmin deploy dataverse-jacoco.war
 
-Note that after deployment the file "/usr/local/payara5/glassfish/domains/domain1/config/jacoco.exec" exists and is empty.
+Note that after deployment the file "/usr/local/payara6/glassfish/domains/domain1/config/jacoco.exec" exists and is empty.
 
 Run Integration Tests
 ~~~~~~~~~~~~~~~~~~~~~
 
 Note that even though you see "docker-aio" in the command below, we assume you are not necessarily running the test suite within Docker. (Some day we'll probably move this script to another directory.) For this reason, we pass the URL with the normal port (8080) that app servers run on to the ``run-test-suite.sh`` script.
 
-Note that "/usr/local/payara5/glassfish/domains/domain1/config/jacoco.exec" will become non-empty after you stop and start Payara. You must stop and start Payara before every run of the integration test suite.
+Note that "/usr/local/payara6/glassfish/domains/domain1/config/jacoco.exec" will become non-empty after you stop and start Payara. You must stop and start Payara before every run of the integration test suite.
 
 .. code-block:: bash
 
-  /usr/local/payara5/bin/asadmin stop-domain
-  /usr/local/payara5/bin/asadmin start-domain
+  /usr/local/payara6/bin/asadmin stop-domain
+  /usr/local/payara6/bin/asadmin start-domain
   git clone https://github.com/IQSS/dataverse.git
   cd dataverse
   conf/docker-aio/run-test-suite.sh http://localhost:8080
@@ -383,7 +385,7 @@ Run these commands as the "dataverse" user. The ``cd dataverse`` means that you
 .. code-block:: bash
 
   cd dataverse
-  java -jar /home/dataverse/local/jacoco-0.8.1/lib/jacococli.jar report --classfiles target/classes --sourcefiles src/main/java --html target/coverage-it/ /usr/local/payara5/glassfish/domains/domain1/config/jacoco.exec
+  java -jar /home/dataverse/local/jacoco-0.8.1/lib/jacococli.jar report --classfiles target/classes --sourcefiles src/main/java --html target/coverage-it/ /usr/local/payara6/glassfish/domains/domain1/config/jacoco.exec
 
 Read Code Coverage Report
 ~~~~~~~~~~~~~~~~~~~~~~~~~
@@ -504,7 +506,6 @@ Browser-Based Testing
 Installation Testing
 ~~~~~~~~~~~~~~~~~~~~
 
-- Run `vagrant up` on a server to test the installer
 - Work with @donsizemore to automate testing of https://github.com/GlobalDataverseCommunityConsortium/dataverse-ansible
 
 Future Work on Load/Performance Testing
diff --git a/doc/sphinx-guides/source/developers/tips.rst b/doc/sphinx-guides/source/developers/tips.rst
index bf75a05f84e..e1ee40cafa5 100755
--- a/doc/sphinx-guides/source/developers/tips.rst
+++ b/doc/sphinx-guides/source/developers/tips.rst
@@ -19,20 +19,20 @@ Undeploy the war File from the Dataverse Software Installation Script
 
 Because the initial deployment of the war file was done outside of Netbeans by the Dataverse Software installation script, it's a good idea to undeploy that war file to give Netbeans a clean slate to work with.
 
-Assuming you installed Payara in ``/usr/local/payara5``, run the following ``asadmin`` command to see the version of the Dataverse Software that the Dataverse Software installation script deployed:
+Assuming you installed Payara in ``/usr/local/payara6``, run the following ``asadmin`` command to see the version of the Dataverse Software that the Dataverse Software installation script deployed:
 
-``/usr/local/payara5/bin/asadmin list-applications``
+``/usr/local/payara6/bin/asadmin list-applications``
 
 You will probably see something like ``dataverse-5.0 <ejb, web>`` as the output. To undeploy, use whichever version you see like this:
 
-``/usr/local/payara5/bin/asadmin undeploy dataverse-5.0``
+``/usr/local/payara6/bin/asadmin undeploy dataverse-5.0``
 
 Now that Payara doesn't have anything deployed, we can proceed with getting Netbeans set up to deploy the code.
 
 Add Payara as a Server in Netbeans
 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
 
-Launch Netbeans and click "Tools" and then "Servers". Click "Add Server" and select "Payara Server" and set the installation location to ``/usr/local/payara5``. The defaults are fine so you can click "Next" and "Finish".
+Launch Netbeans and click "Tools" and then "Servers". Click "Add Server" and select "Payara Server" and set the installation location to ``/usr/local/payara6``. The defaults are fine so you can click "Next" and "Finish".
 
 Please note that if you are on a Mac, Netbeans may be unable to start Payara due to proxy settings in Netbeans. Go to the "General" tab in Netbeans preferences and click "Test connection" to see if you are affected. If you get a green checkmark, you're all set. If you get a red exclamation mark, change "Proxy Settings" to "No Proxy" and retest. A more complicated answer having to do with changing network settings is available at https://discussions.apple.com/thread/7680039?answerId=30715103022#30715103022 and the bug is also described at https://netbeans.org/bugzilla/show_bug.cgi?id=268076
 
@@ -117,7 +117,7 @@ Deploying With ``asadmin``
 
 Sometimes you want to deploy code without using Netbeans or from the command line on a server you have ssh'ed into.
 
-For the ``asadmin`` commands below, we assume you have already changed directories to ``/usr/local/payara5/glassfish/bin`` or wherever you have installed Payara.
+For the ``asadmin`` commands below, we assume you have already changed directories to ``/usr/local/payara6/glassfish/bin`` or wherever you have installed Payara.
 
 There are four steps to this process:
 
diff --git a/doc/sphinx-guides/source/developers/tools.rst b/doc/sphinx-guides/source/developers/tools.rst
index cbd27d6e8d2..a21becd14cf 100755
--- a/doc/sphinx-guides/source/developers/tools.rst
+++ b/doc/sphinx-guides/source/developers/tools.rst
@@ -25,21 +25,6 @@ Maven
 
 With Maven installed you can run ``mvn package`` and ``mvn test`` from the command line. It can be downloaded from https://maven.apache.org
 
-.. _vagrant:
-
-Vagrant
-+++++++
-
-Vagrant allows you to spin up a virtual machine running the Dataverse Software on your development workstation. You'll need to install Vagrant from https://www.vagrantup.com and VirtualBox from https://www.virtualbox.org.
-
-We assume you have already cloned the repo from https://github.com/IQSS/dataverse as explained in the :doc:`/developers/dev-environment` section.
-
-From the root of the git repo (where the ``Vagrantfile`` is), run ``vagrant up`` and eventually you should be able to reach a Dataverse installation at http://localhost:8888 (the ``forwarded_port`` indicated in the ``Vagrantfile``).
-
-Please note that running ``vagrant up`` for the first time should run the ``downloads/download.sh`` script for you to download required software such as an app server, Solr, etc. However, these dependencies change over time so it's a place to look if ``vagrant up`` was working but later fails.
-
-On Windows if you see an error like ``/usr/bin/perl^M: bad interpreter`` you might need to run ``dos2unix`` on the installation scripts. 
-
 PlantUML
 ++++++++
 
diff --git a/doc/sphinx-guides/source/installation/advanced.rst b/doc/sphinx-guides/source/installation/advanced.rst
index a842d566595..87f2a4fd0ab 100644
--- a/doc/sphinx-guides/source/installation/advanced.rst
+++ b/doc/sphinx-guides/source/installation/advanced.rst
@@ -13,8 +13,8 @@ Multiple App Servers
 You should be conscious of the following when running multiple app servers.
 
 - Only one app server can be the dedicated timer server, as explained in the :doc:`/admin/timers` section of the Admin Guide.
-- When users upload a logo or footer for their Dataverse collection using the "theme" feature described in the :doc:`/user/dataverse-management` section of the User Guide, these logos are stored only on the app server the user happened to be on when uploading the logo. By default these logos and footers are written to the directory ``/usr/local/payara5/glassfish/domains/domain1/docroot/logos``.
-- When a sitemap is created by an app server it is written to the filesystem of just that app server. By default the sitemap is written to the directory ``/usr/local/payara5/glassfish/domains/domain1/docroot/sitemap``.
+- When users upload a logo or footer for their Dataverse collection using the "theme" feature described in the :doc:`/user/dataverse-management` section of the User Guide, these logos are stored only on the app server the user happened to be on when uploading the logo. By default these logos and footers are written to the directory ``/usr/local/payara6/glassfish/domains/domain1/docroot/logos``.
+- When a sitemap is created by an app server it is written to the filesystem of just that app server. By default the sitemap is written to the directory ``/usr/local/payara6/glassfish/domains/domain1/docroot/sitemap``.
 - If Make Data Count is used, its raw logs must be copied from each app server to single instance of Counter Processor. See also :ref:`:MDCLogPath` section in the Configuration section of this guide and the :doc:`/admin/make-data-count` section of the Admin Guide.
 - Dataset draft version logging occurs separately on each app server. See :ref:`edit-draft-versions-logging` section in Monitoring of the Admin Guide for details.
 - Password aliases (``dataverse.db.password``, etc.) are stored per app server.
diff --git a/doc/sphinx-guides/source/installation/config.rst b/doc/sphinx-guides/source/installation/config.rst
index 8493702406b..f9fe74afc7c 100644
--- a/doc/sphinx-guides/source/installation/config.rst
+++ b/doc/sphinx-guides/source/installation/config.rst
@@ -501,7 +501,7 @@ Logging & Slow Performance
 File Storage: Using a Local Filesystem and/or Swift and/or Object Stores and/or Trusted Remote Stores
 -----------------------------------------------------------------------------------------------------
 
-By default, a Dataverse installation stores all data files (files uploaded by end users) on the filesystem at ``/usr/local/payara5/glassfish/domains/domain1/files``. This path can vary based on answers you gave to the installer (see the :ref:`dataverse-installer` section of the Installation Guide) or afterward by reconfiguring the ``dataverse.files.\<id\>.directory`` JVM option described below.
+By default, a Dataverse installation stores all data files (files uploaded by end users) on the filesystem at ``/usr/local/payara6/glassfish/domains/domain1/files``. This path can vary based on answers you gave to the installer (see the :ref:`dataverse-installer` section of the Installation Guide) or afterward by reconfiguring the ``dataverse.files.\<id\>.directory`` JVM option described below.
 
 A Dataverse installation can alternately store files in a Swift or S3-compatible object store, and can now be configured to support multiple stores at once. With a multi-store configuration, the location for new files can be controlled on a per-Dataverse collection basis.
 
@@ -975,7 +975,7 @@ All of these processes are triggered after finishing transfers over the wire and
 Before being moved there,
 
 - JSF Web UI uploads are stored at :ref:`${dataverse.files.uploads} <dataverse.files.uploads>`, defaulting to
-  ``/usr/local/payara5/glassfish/domains/domain1/uploads`` folder in a standard installation. This place is
+  ``/usr/local/payara6/glassfish/domains/domain1/uploads`` folder in a standard installation. This place is
   configurable and might be set to a separate disk volume where stale uploads are purged periodically.
 - API uploads are stored at the system's temporary files location indicated by the Java system property
   ``java.io.tmpdir``, defaulting to ``/tmp`` on Linux. If this location is backed by a `tmpfs <https://www.kernel.org/doc/html/latest/filesystems/tmpfs.html>`_
@@ -1053,7 +1053,7 @@ Custom Navbar Logo
 
 The Dataverse Software allows you to replace the default Dataverse Project icon and name branding in the navbar with your own custom logo. Note that this logo is separate from the logo used in the theme of the root Dataverse collection (see :ref:`theme`).
 
-The custom logo image file is expected to be small enough to fit comfortably in the navbar, no more than 50 pixels in height and 160 pixels in width. Create a ``navbar`` directory in your Payara ``logos`` directory and place your custom logo there. By default, your logo image file will be located at ``/usr/local/payara5/glassfish/domains/domain1/docroot/logos/navbar/logo.png``.
+The custom logo image file is expected to be small enough to fit comfortably in the navbar, no more than 50 pixels in height and 160 pixels in width. Create a ``navbar`` directory in your Payara ``logos`` directory and place your custom logo there. By default, your logo image file will be located at ``/usr/local/payara6/glassfish/domains/domain1/docroot/logos/navbar/logo.png``.
 
 Given this location for the custom logo image file, run this curl command to add it to your settings:
 
@@ -1518,7 +1518,7 @@ The Google Cloud Archiver also requires a key file that must be renamed to 'goog
 
 For example:
 
-``cp <your key file> /usr/local/payara5/glassfish/domains/domain1/files/googlecloudkey.json``
+``cp <your key file> /usr/local/payara6/glassfish/domains/domain1/files/googlecloudkey.json``
 
 .. _S3 Archiver Configuration:
 
@@ -1634,7 +1634,7 @@ You have a couple of options for putting an updated robots.txt file into product
 
 For more of an explanation of ``ProxyPassMatch`` see the :doc:`shibboleth` section.
 
-If you are not fronting Payara with Apache you'll need to prevent Payara from serving the robots.txt file embedded in the war file by overwriting robots.txt after the war file has been deployed. The downside of this technique is that you will have to remember to overwrite robots.txt in the "exploded" war file each time you deploy the war file, which probably means each time you upgrade to a new version of the Dataverse Software. Furthermore, since the version of the Dataverse Software is always incrementing and the version can be part of the file path, you will need to be conscious of where on disk you need to replace the file. For example, for Dataverse Software 4.6.1 the path to robots.txt may be ``/usr/local/payara5/glassfish/domains/domain1/applications/dataverse-4.6.1/robots.txt`` with the version number ``4.6.1`` as part of the path.
+If you are not fronting Payara with Apache you'll need to prevent Payara from serving the robots.txt file embedded in the war file by overwriting robots.txt after the war file has been deployed. The downside of this technique is that you will have to remember to overwrite robots.txt in the "exploded" war file each time you deploy the war file, which probably means each time you upgrade to a new version of the Dataverse Software. Furthermore, since the version of the Dataverse Software is always incrementing and the version can be part of the file path, you will need to be conscious of where on disk you need to replace the file. For example, for Dataverse Software 4.6.1 the path to robots.txt may be ``/usr/local/payara6/glassfish/domains/domain1/applications/dataverse-4.6.1/robots.txt`` with the version number ``4.6.1`` as part of the path.
 
 Creating a Sitemap and Submitting it to Search Engines
 ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
@@ -1647,7 +1647,7 @@ Create or update your sitemap by adding the following curl command to cron to ru
 
 This will create or update a file in the following location unless you have customized your installation directory for Payara:
 
-``/usr/local/payara5/glassfish/domains/domain1/docroot/sitemap/sitemap.xml``
+``/usr/local/payara6/glassfish/domains/domain1/docroot/sitemap/sitemap.xml``
 
 On Dataverse installation with many datasets, the creation or updating of the sitemap can take a while. You can check Payara's server.log file for "BEGIN updateSiteMap" and "END updateSiteMap" lines to know when the process started and stopped and any errors in between.
 
@@ -1690,7 +1690,7 @@ When changing values these values with ``asadmin``, you'll need to delete the ol
 
 ``./asadmin create-jvm-options "-Ddataverse.fqdn=dataverse.example.com"``
 
-It's also possible to change these values by stopping Payara, editing ``payara5/glassfish/domains/domain1/config/domain.xml``, and restarting Payara.
+It's also possible to change these values by stopping Payara, editing ``payara6/glassfish/domains/domain1/config/domain.xml``, and restarting Payara.
 
 .. _dataverse.fqdn:
 
@@ -1786,7 +1786,7 @@ Configure a folder to store the incoming file stream during uploads (before tran
 Please also see :ref:`temporary-file-storage` for more details.
 You can use an absolute path or a relative, which is relative to the application server domain directory.
 
-Defaults to ``./uploads``, which resolves to ``/usr/local/payara5/glassfish/domains/domain1/uploads`` in a default
+Defaults to ``./uploads``, which resolves to ``/usr/local/payara6/glassfish/domains/domain1/uploads`` in a default
 installation.
 
 Can also be set via *MicroProfile Config API* sources, e.g. the environment variable ``DATAVERSE_FILES_UPLOADS``.
@@ -3485,7 +3485,7 @@ Sets how long a cached metrics result is used before re-running the query for a
 
 Sets the path where the raw Make Data Count logs are stored before being processed. If not set, no logs will be created for Make Data Count. See also the :doc:`/admin/make-data-count` section of the Admin Guide.
 
-``curl -X PUT -d '/usr/local/payara5/glassfish/domains/domain1/logs' http://localhost:8080/api/admin/settings/:MDCLogPath``
+``curl -X PUT -d '/usr/local/payara6/glassfish/domains/domain1/logs' http://localhost:8080/api/admin/settings/:MDCLogPath``
 
 .. _:DisplayMDCMetrics:
 
diff --git a/doc/sphinx-guides/source/installation/installation-main.rst b/doc/sphinx-guides/source/installation/installation-main.rst
index 8559d6ce194..021a97415e3 100755
--- a/doc/sphinx-guides/source/installation/installation-main.rst
+++ b/doc/sphinx-guides/source/installation/installation-main.rst
@@ -28,8 +28,8 @@ Unpack the zip file - this will create the directory ``dvinstall``.
 
 Just make sure the user running the installer has write permission to:
 
-- /usr/local/payara5/glassfish/lib
-- /usr/local/payara5/glassfish/domains/domain1
+- /usr/local/payara6/glassfish/lib
+- /usr/local/payara6/glassfish/domains/domain1
 - the current working directory of the installer (it currently writes its logfile there), and
 - your jvm-option specified files.dir
 
@@ -47,7 +47,7 @@ Follow the instructions in the text file.
 The script will prompt you for some configuration values. If this is a test/evaluation installation, it may be possible to accept the default values provided for most of the settings:
 
 - Internet Address of your host: localhost
-- Payara Directory: /usr/local/payara5
+- Payara Directory: /usr/local/payara6
 - Payara User: current user running the installer script
 - Administrator email address for this Dataverse installation: (none)
 - SMTP (mail) server to relay notification messages: localhost
@@ -98,7 +98,7 @@ The supplied site URL will be saved under the JVM option :ref:`dataverse.siteUrl
 
 **IMPORTANT:** Please note, that "out of the box" the installer will configure the Dataverse installation to leave unrestricted access to the administration APIs from (and only from) localhost. Please consider the security implications of this arrangement (anyone with shell access to the server can potentially mess with your Dataverse installation). An alternative solution would be to block open access to these sensitive API endpoints completely; and to only allow requests supplying a pre-defined "unblock token" (password). If you prefer that as a solution, please consult the supplied script ``post-install-api-block.sh`` for examples on how to set it up. See also "Securing Your Installation" under the :doc:`config` section.
 
-The Dataverse Software uses JHOVE_ to help identify the file format (CSV, PNG, etc.) for files that users have uploaded. The installer places files called ``jhove.conf`` and ``jhoveConfig.xsd`` into the directory ``/usr/local/payara5/glassfish/domains/domain1/config`` by default and makes adjustments to the jhove.conf file based on the directory into which you chose to install Payara.
+The Dataverse Software uses JHOVE_ to help identify the file format (CSV, PNG, etc.) for files that users have uploaded. The installer places files called ``jhove.conf`` and ``jhoveConfig.xsd`` into the directory ``/usr/local/payara6/glassfish/domains/domain1/config`` by default and makes adjustments to the jhove.conf file based on the directory into which you chose to install Payara.
 
 .. _JHOVE: http://jhove.openpreservation.org
 
@@ -249,7 +249,7 @@ Deleting Uploaded Files
 
 The path below will depend on the value for ``dataverse.files.directory`` as described in the :doc:`config` section:
 
-``rm -rf /usr/local/payara5/glassfish/domains/domain1/files``
+``rm -rf /usr/local/payara6/glassfish/domains/domain1/files``
 
 Rerun Installer
 ^^^^^^^^^^^^^^^
diff --git a/doc/sphinx-guides/source/installation/intro.rst b/doc/sphinx-guides/source/installation/intro.rst
index 2251af7b81b..67fc774bdbd 100644
--- a/doc/sphinx-guides/source/installation/intro.rst
+++ b/doc/sphinx-guides/source/installation/intro.rst
@@ -48,7 +48,7 @@ If you've encountered a problem installing Dataverse and are ready to ask for he
 - Operating system (usually a Linux distribution) and version.
 - Output from the installer (STDOUT, STDERR).
 - The ``scripts/api/setup-all.*.log`` files left behind by the installer.
-- The ``server.log`` file from Payara (by default at ``/usr/local/payara5/glassfish/domains/domain1/logs/server.log``).
+- The ``server.log`` file from Payara (by default at ``/usr/local/payara6/glassfish/domains/domain1/logs/server.log``).
 
 Improving this Guide
 --------------------
diff --git a/doc/sphinx-guides/source/installation/prerequisites.rst b/doc/sphinx-guides/source/installation/prerequisites.rst
index d95aa78bb26..1847f1b8f63 100644
--- a/doc/sphinx-guides/source/installation/prerequisites.rst
+++ b/doc/sphinx-guides/source/installation/prerequisites.rst
@@ -19,7 +19,7 @@ We assume you plan to run your Dataverse installation on Linux and we recommend
 Java
 ----
 
-The Dataverse Software requires Java SE 11 (or higher).
+The Dataverse Software requires Java SE 17 (or higher).
 
 Installing Java
 ===============
@@ -30,11 +30,11 @@ The Oracle JDK can be downloaded from http://www.oracle.com/technetwork/java/jav
 
 On a RHEL/derivative, install OpenJDK (devel version) using yum::
 
-	# sudo yum install java-11-openjdk
+	# sudo yum install java-17-openjdk
 
-If you have multiple versions of Java installed, Java 11 should be the default when ``java`` is invoked from the command line. You can test this by running ``java -version``.
+If you have multiple versions of Java installed, Java 17 should be the default when ``java`` is invoked from the command line. You can test this by running ``java -version``.
 
-On RHEL/derivative you can make Java 11 the default with the ``alternatives`` command, having it prompt you to select the version of Java from a list::
+On RHEL/derivative you can make Java 17 the default with the ``alternatives`` command, having it prompt you to select the version of Java from a list::
 
         # alternatives --config java
 
@@ -44,7 +44,7 @@ On RHEL/derivative you can make Java 11 the default with the ``alternatives`` co
 Payara
 ------
 
-Payara 5.2022.3 is recommended. Newer versions might work fine, regular updates are recommended.
+Payara 6.2023.8 is recommended. Newer versions might work fine. Regular updates are recommended.
 
 Installing Payara
 =================
@@ -53,11 +53,11 @@ Installing Payara
 
 	# useradd dataverse
 
-- Download and install Payara (installed in ``/usr/local/payara5`` in the example commands below)::
+- Download and install Payara (installed in ``/usr/local/payara6`` in the example commands below)::
 
-	# wget https://nexus.payara.fish/repository/payara-community/fish/payara/distributions/payara/5.2022.3/payara-5.2022.3.zip
-	# unzip payara-5.2022.3.zip
-	# mv payara5 /usr/local
+	# wget https://nexus.payara.fish/repository/payara-community/fish/payara/distributions/payara/6.2023.8/payara-6.2023.8.zip
+	# unzip payara-6.2023.8.zip
+	# mv payara6 /usr/local
 
 If nexus.payara.fish is ever down for maintenance, Payara distributions are also available from https://repo1.maven.org/maven2/fish/payara/distributions/payara/
 
@@ -65,15 +65,15 @@ If you intend to install and run Payara under a service account (and we hope you
 
 - Set service account permissions::
 
-	# chown -R root:root /usr/local/payara5
-	# chown dataverse /usr/local/payara5/glassfish/lib
-	# chown -R dataverse:dataverse /usr/local/payara5/glassfish/domains/domain1
+	# chown -R root:root /usr/local/payara6
+	# chown dataverse /usr/local/payara6/glassfish/lib
+	# chown -R dataverse:dataverse /usr/local/payara6/glassfish/domains/domain1
 
 After installation, you may chown the lib/ directory back to root; the installer only needs write access to copy the JDBC driver into that directory.
 
 - Change from ``-client`` to ``-server`` under ``<jvm-options>-client</jvm-options>``::
 
-	# vim /usr/local/payara5/glassfish/domains/domain1/config/domain.xml
+	# vim /usr/local/payara6/glassfish/domains/domain1/config/domain.xml
 
 This recommendation comes from http://www.c2b2.co.uk/middleware-blog/glassfish-4-performance-tuning-monitoring-and-troubleshooting.php among other places.
 
@@ -97,10 +97,14 @@ Also note that Payara may utilize more than the default number of file descripto
 PostgreSQL
 ----------
 
+PostgreSQL 13 is recommended because it's the version we test against. Version 10 or higher is required because that's what's `supported by Flyway <https://documentation.red-gate.com/fd/postgresql-184127604.html>`_, which we use for database migrations.
+
+You are welcome to experiment with newer versions of PostgreSQL, but please note that as of PostgreSQL 15, permissions have been restricted on the ``public`` schema (`release notes <https://www.postgresql.org/docs/release/15.0/>`_, `EDB blog post <https://www.enterprisedb.com/blog/new-public-schema-permissions-postgresql-15>`_, `Crunchy Data blog post <https://www.crunchydata.com/blog/be-ready-public-schema-changes-in-postgres-15>`_). The Dataverse installer has been updated to restore the old permissions, but this may not be a long term solution.
+
 Installing PostgreSQL
 =====================
 
-The application has been tested with PostgreSQL versions up to 13 and version 10+ is required. We recommend installing the latest version that is available for your OS distribution. *For example*, to install PostgreSQL 13 under RHEL7/derivative::
+*For example*, to install PostgreSQL 13 under RHEL7/derivative::
 
 	# yum install -y https://download.postgresql.org/pub/repos/yum/reporpms/EL-7-x86_64/pgdg-redhat-repo-latest.noarch.rpm
 	# yum makecache fast
@@ -154,12 +158,12 @@ Configuring Database Access for the Dataverse Installation (and the Dataverse So
 Solr
 ----
 
-The Dataverse Software search index is powered by Solr.
+The Dataverse software search index is powered by Solr.
 
 Supported Versions
 ==================
 
-The Dataverse Software has been tested with Solr version 8.11.1. Future releases in the 8.x series are likely to be compatible; however, this cannot be confirmed until they are officially tested. Major releases above 8.x (e.g. 9.x) are not supported.
+The Dataverse software has been tested with Solr version 9.3.0. Future releases in the 9.x series are likely to be compatible. Please get in touch (:ref:`support`) if you are having trouble with a newer version.
 
 Installing Solr
 ===============
@@ -174,19 +178,19 @@ Become the ``solr`` user and then download and configure Solr::
 
         su - solr
         cd /usr/local/solr
-        wget https://archive.apache.org/dist/lucene/solr/8.11.1/solr-8.11.1.tgz
-        tar xvzf solr-8.11.1.tgz
-        cd solr-8.11.1
+        wget https://archive.apache.org/dist/solr/solr/9.3.0/solr-9.3.0.tgz
+        tar xvzf solr-9.3.0.tgz
+        cd solr-9.3.0
         cp -r server/solr/configsets/_default server/solr/collection1
 
 You should already have a "dvinstall.zip" file that you downloaded from https://github.com/IQSS/dataverse/releases . Unzip it into ``/tmp``. Then copy the files into place::
 
-        cp /tmp/dvinstall/schema*.xml /usr/local/solr/solr-8.11.1/server/solr/collection1/conf
-        cp /tmp/dvinstall/solrconfig.xml /usr/local/solr/solr-8.11.1/server/solr/collection1/conf
+        cp /tmp/dvinstall/schema*.xml /usr/local/solr/solr-9.3.0/server/solr/collection1/conf
+        cp /tmp/dvinstall/solrconfig.xml /usr/local/solr/solr-9.3.0/server/solr/collection1/conf
 
 Note: The Dataverse Project team has customized Solr to boost results that come from certain indexed elements inside the Dataverse installation, for example prioritizing results from Dataverse collections over Datasets. If you would like to remove this, edit your ``solrconfig.xml`` and remove the ``<str name="qf">`` element and its contents. If you have ideas about how this boosting could be improved, feel free to contact us through our Google Group https://groups.google.com/forum/#!forum/dataverse-dev .
 
-A Dataverse installation requires a change to the ``jetty.xml`` file that ships with Solr. Edit ``/usr/local/solr/solr-8.11.1/server/etc/jetty.xml`` , increasing ``requestHeaderSize`` from ``8192`` to ``102400``
+A Dataverse installation requires a change to the ``jetty.xml`` file that ships with Solr. Edit ``/usr/local/solr/solr-9.3.0/server/etc/jetty.xml`` , increasing ``requestHeaderSize`` from ``8192`` to ``102400``
 
 Solr will warn about needing to increase the number of file descriptors and max processes in a production environment but will still run with defaults. We have increased these values to the recommended levels by adding ulimit -n 65000 to the init script, and the following to ``/etc/security/limits.conf``::
 
@@ -205,7 +209,7 @@ Solr launches asynchronously and attempts to use the ``lsof`` binary to watch fo
 
 Finally, you need to tell Solr to create the core "collection1" on startup::
 
-        echo "name=collection1" > /usr/local/solr/solr-8.11.1/server/solr/collection1/core.properties
+        echo "name=collection1" > /usr/local/solr/solr-9.3.0/server/solr/collection1/core.properties
 
 Solr Init Script
 ================
diff --git a/doc/sphinx-guides/source/versions.rst b/doc/sphinx-guides/source/versions.rst
index d5ffb2acb66..2000a2097f0 100755
--- a/doc/sphinx-guides/source/versions.rst
+++ b/doc/sphinx-guides/source/versions.rst
@@ -7,7 +7,8 @@ Dataverse Software Documentation Versions
 This list provides a way to refer to the documentation for previous and future versions of the Dataverse Software. In order to learn more about the updates delivered from one version to another, visit the `Releases <https://github.com/IQSS/dataverse/releases>`__ page in our GitHub repo.
 
 - pre-release `HTML (not final!) <http://preview.guides.gdcc.io/en/develop/>`__ and `PDF (experimental!) <http://preview.guides.gdcc.io/_/downloads/en/develop/pdf/>`__ built from the :doc:`develop </developers/version-control>` branch :doc:`(how to contribute!) </developers/documentation>`
-- 5.14
+- 6.0
+- `5.14 </en/5.14/>`__
 - `5.13 </en/5.13/>`__
 - `5.12.1 </en/5.12.1/>`__
 - `5.12 </en/5.12/>`__
diff --git a/downloads/.gitignore b/downloads/.gitignore
deleted file mode 100644
index 1b51bf4def7..00000000000
--- a/downloads/.gitignore
+++ /dev/null
@@ -1,4 +0,0 @@
-payara-5.201.zip
-solr-7.3.0.tgz
-weld-osgi-bundle-2.2.10.Final-glassfish4.jar
-schemaSpy_5.0.0.jar
diff --git a/downloads/download.sh b/downloads/download.sh
deleted file mode 100755
index 7b9de0397cb..00000000000
--- a/downloads/download.sh
+++ /dev/null
@@ -1,5 +0,0 @@
-#!/bin/sh
-curl -L -O https://s3-eu-west-1.amazonaws.com/payara.fish/Payara+Downloads/5.2022.3/payara-5.2022.3.zip
-curl -L -O https://archive.apache.org/dist/lucene/solr/8.11.1/solr-8.11.1.tgz
-curl -L -O https://search.maven.org/remotecontent?filepath=org/jboss/weld/weld-osgi-bundle/2.2.10.Final/weld-osgi-bundle-2.2.10.Final-glassfish4.jar
-curl -s -L http://sourceforge.net/projects/schemaspy/files/schemaspy/SchemaSpy%205.0.0/schemaSpy_5.0.0.jar/download > schemaSpy_5.0.0.jar
diff --git a/downloads/stata-13-test-files/Stata14TestFile.dta b/downloads/stata-13-test-files/Stata14TestFile.dta
deleted file mode 100644
index 6f1c31dc798..00000000000
Binary files a/downloads/stata-13-test-files/Stata14TestFile.dta and /dev/null differ
diff --git a/local_lib/com/apicatalog/titanium-json-ld/1.3.0-SNAPSHOT/titanium-json-ld-1.3.0-SNAPSHOT.jar b/local_lib/com/apicatalog/titanium-json-ld/1.3.0-SNAPSHOT/titanium-json-ld-1.3.0-SNAPSHOT.jar
deleted file mode 100644
index ee499ae4b76..00000000000
Binary files a/local_lib/com/apicatalog/titanium-json-ld/1.3.0-SNAPSHOT/titanium-json-ld-1.3.0-SNAPSHOT.jar and /dev/null differ
diff --git a/local_lib/edu/harvard/iq/dvn/unf5/5.0/unf5-5.0.jar b/local_lib/edu/harvard/iq/dvn/unf5/5.0/unf5-5.0.jar
deleted file mode 100644
index dc41f94046f..00000000000
Binary files a/local_lib/edu/harvard/iq/dvn/unf5/5.0/unf5-5.0.jar and /dev/null differ
diff --git a/local_lib/edu/harvard/iq/dvn/unf5/5.0/unf5-5.0.jar.md5 b/local_lib/edu/harvard/iq/dvn/unf5/5.0/unf5-5.0.jar.md5
deleted file mode 100644
index 7018ea4e822..00000000000
--- a/local_lib/edu/harvard/iq/dvn/unf5/5.0/unf5-5.0.jar.md5
+++ /dev/null
@@ -1 +0,0 @@
-eeef5c0dc201d1105b9529a51fa8cdab
diff --git a/local_lib/edu/harvard/iq/dvn/unf5/5.0/unf5-5.0.jar.sha1 b/local_lib/edu/harvard/iq/dvn/unf5/5.0/unf5-5.0.jar.sha1
deleted file mode 100644
index 97f192f3732..00000000000
--- a/local_lib/edu/harvard/iq/dvn/unf5/5.0/unf5-5.0.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-1fa716d318920fd59fc63f77965d113decf97355
diff --git a/local_lib/edu/harvard/iq/dvn/unf5/5.0/unf5-5.0.pom b/local_lib/edu/harvard/iq/dvn/unf5/5.0/unf5-5.0.pom
deleted file mode 100644
index ea2e4c03f9f..00000000000
--- a/local_lib/edu/harvard/iq/dvn/unf5/5.0/unf5-5.0.pom
+++ /dev/null
@@ -1,8 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<project xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd" xmlns="http://maven.apache.org/POM/4.0.0"
-    xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
-  <modelVersion>4.0.0</modelVersion>
-  <groupId>edu.harvard.iq.dvn</groupId>
-  <artifactId>unf5</artifactId>
-  <version>5.0</version>
-</project>
diff --git a/local_lib/edu/harvard/iq/dvn/unf5/5.0/unf5-5.0.pom.md5 b/local_lib/edu/harvard/iq/dvn/unf5/5.0/unf5-5.0.pom.md5
deleted file mode 100644
index a88cf2a1c02..00000000000
--- a/local_lib/edu/harvard/iq/dvn/unf5/5.0/unf5-5.0.pom.md5
+++ /dev/null
@@ -1 +0,0 @@
-2df5dac09375e1e7fcd66c705d9ca2ef
diff --git a/local_lib/edu/harvard/iq/dvn/unf5/5.0/unf5-5.0.pom.sha1 b/local_lib/edu/harvard/iq/dvn/unf5/5.0/unf5-5.0.pom.sha1
deleted file mode 100644
index 967b977b79e..00000000000
--- a/local_lib/edu/harvard/iq/dvn/unf5/5.0/unf5-5.0.pom.sha1
+++ /dev/null
@@ -1 +0,0 @@
-431cd55e2e9379677d14e402dd3c474bb7be4ac9
diff --git a/local_lib/net/handle/handle/8.1.1/handle-8.1.1.jar b/local_lib/net/handle/handle/8.1.1/handle-8.1.1.jar
deleted file mode 100644
index 1f8e1c3eb12..00000000000
Binary files a/local_lib/net/handle/handle/8.1.1/handle-8.1.1.jar and /dev/null differ
diff --git a/local_lib/net/handle/handle/8.1.1/handle-8.1.1.pom b/local_lib/net/handle/handle/8.1.1/handle-8.1.1.pom
deleted file mode 100644
index e3c09349172..00000000000
--- a/local_lib/net/handle/handle/8.1.1/handle-8.1.1.pom
+++ /dev/null
@@ -1,9 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<project xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd" xmlns="http://maven.apache.org/POM/4.0.0"
-    xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
-  <modelVersion>4.0.0</modelVersion>
-  <groupId>net.handle</groupId>
-  <artifactId>handle</artifactId>
-  <version>8.1.1</version>
-  <description>POM was created from install:install-file</description>
-</project>
diff --git a/local_lib/nom/tam/fits/fits/2012-10-25-generated/fits-2012-10-25-generated.jar b/local_lib/nom/tam/fits/fits/2012-10-25-generated/fits-2012-10-25-generated.jar
deleted file mode 100644
index b3bddd62c24..00000000000
Binary files a/local_lib/nom/tam/fits/fits/2012-10-25-generated/fits-2012-10-25-generated.jar and /dev/null differ
diff --git a/local_lib/nom/tam/fits/fits/2012-10-25-generated/fits-2012-10-25-generated.jar.md5 b/local_lib/nom/tam/fits/fits/2012-10-25-generated/fits-2012-10-25-generated.jar.md5
deleted file mode 100644
index 576062f55a1..00000000000
--- a/local_lib/nom/tam/fits/fits/2012-10-25-generated/fits-2012-10-25-generated.jar.md5
+++ /dev/null
@@ -1 +0,0 @@
-b0abb2fee242c479f305f47352600bbf
diff --git a/local_lib/nom/tam/fits/fits/2012-10-25-generated/fits-2012-10-25-generated.jar.sha1 b/local_lib/nom/tam/fits/fits/2012-10-25-generated/fits-2012-10-25-generated.jar.sha1
deleted file mode 100644
index e81e8450ef0..00000000000
--- a/local_lib/nom/tam/fits/fits/2012-10-25-generated/fits-2012-10-25-generated.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-9643e138cb5ed2684838e4b4faa118adfb2ecb4b
diff --git a/local_lib/nom/tam/fits/fits/2012-10-25-generated/fits-2012-10-25-generated.pom b/local_lib/nom/tam/fits/fits/2012-10-25-generated/fits-2012-10-25-generated.pom
deleted file mode 100644
index b57cd67278b..00000000000
--- a/local_lib/nom/tam/fits/fits/2012-10-25-generated/fits-2012-10-25-generated.pom
+++ /dev/null
@@ -1,8 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<project xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd" xmlns="http://maven.apache.org/POM/4.0.0"
-    xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
-  <modelVersion>4.0.0</modelVersion>
-  <groupId>nom.tam.fits</groupId>
-  <artifactId>fits</artifactId>
-  <version>2012-10-25-generated</version>
-</project>
diff --git a/local_lib/nom/tam/fits/fits/2012-10-25-generated/fits-2012-10-25-generated.pom.md5 b/local_lib/nom/tam/fits/fits/2012-10-25-generated/fits-2012-10-25-generated.pom.md5
deleted file mode 100644
index 777b4df3325..00000000000
--- a/local_lib/nom/tam/fits/fits/2012-10-25-generated/fits-2012-10-25-generated.pom.md5
+++ /dev/null
@@ -1 +0,0 @@
-23ca47c46df791f220a87cfef3b2190c
diff --git a/local_lib/nom/tam/fits/fits/2012-10-25-generated/fits-2012-10-25-generated.pom.sha1 b/local_lib/nom/tam/fits/fits/2012-10-25-generated/fits-2012-10-25-generated.pom.sha1
deleted file mode 100644
index b5f41fd1a69..00000000000
--- a/local_lib/nom/tam/fits/fits/2012-10-25-generated/fits-2012-10-25-generated.pom.sha1
+++ /dev/null
@@ -1 +0,0 @@
-c1ec9dfbbc72dc4623d309d772b804e47284ee27
diff --git a/local_lib/nom/tam/fits/fits/maven-metadata.xml b/local_lib/nom/tam/fits/fits/maven-metadata.xml
deleted file mode 100644
index 4fc3254df3f..00000000000
--- a/local_lib/nom/tam/fits/fits/maven-metadata.xml
+++ /dev/null
@@ -1,12 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<metadata>
-  <groupId>nom.tam.fits</groupId>
-  <artifactId>fits</artifactId>
-  <versioning>
-    <release>2012-10-25-generated</release>
-    <versions>
-      <version>2012-10-25-generated</version>
-    </versions>
-    <lastUpdated>20130925190525</lastUpdated>
-  </versioning>
-</metadata>
diff --git a/local_lib/nom/tam/fits/fits/maven-metadata.xml.md5 b/local_lib/nom/tam/fits/fits/maven-metadata.xml.md5
deleted file mode 100644
index b6d7e4a726f..00000000000
--- a/local_lib/nom/tam/fits/fits/maven-metadata.xml.md5
+++ /dev/null
@@ -1 +0,0 @@
-545c78160393b4c80e40377f2a7cf406
\ No newline at end of file
diff --git a/local_lib/nom/tam/fits/fits/maven-metadata.xml.sha1 b/local_lib/nom/tam/fits/fits/maven-metadata.xml.sha1
deleted file mode 100644
index 188cf8ae044..00000000000
--- a/local_lib/nom/tam/fits/fits/maven-metadata.xml.sha1
+++ /dev/null
@@ -1 +0,0 @@
-9cf56b8ef3f2bacdc669c2c7cdcd7cd50ed38dbb
\ No newline at end of file
diff --git a/local_lib/org/dataverse/unf/6.0/unf-6.0.jar b/local_lib/org/dataverse/unf/6.0/unf-6.0.jar
deleted file mode 100644
index d2738e2dadd..00000000000
Binary files a/local_lib/org/dataverse/unf/6.0/unf-6.0.jar and /dev/null differ
diff --git a/local_lib/org/dataverse/unf/6.0/unf-6.0.jar.md5 b/local_lib/org/dataverse/unf/6.0/unf-6.0.jar.md5
deleted file mode 100644
index 04ca3e73ce8..00000000000
--- a/local_lib/org/dataverse/unf/6.0/unf-6.0.jar.md5
+++ /dev/null
@@ -1 +0,0 @@
-bd9b84a9ad737a81a2699ab81541a901
diff --git a/local_lib/org/dataverse/unf/6.0/unf-6.0.jar.sha1 b/local_lib/org/dataverse/unf/6.0/unf-6.0.jar.sha1
deleted file mode 100644
index a48cef32570..00000000000
--- a/local_lib/org/dataverse/unf/6.0/unf-6.0.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-4cad279c362e4c5c17a2058dc2c8f2fc97c76bf8
diff --git a/local_lib/org/dataverse/unf/6.0/unf-6.0.pom b/local_lib/org/dataverse/unf/6.0/unf-6.0.pom
deleted file mode 100644
index 06f1508723f..00000000000
--- a/local_lib/org/dataverse/unf/6.0/unf-6.0.pom
+++ /dev/null
@@ -1,8 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<project xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd" xmlns="http://maven.apache.org/POM/4.0.0"
-    xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
-  <modelVersion>4.0.0</modelVersion>
-  <groupId>org.dataverse</groupId>
-  <artifactId>unf</artifactId>
-  <version>6.0</version>
-</project>
diff --git a/local_lib/org/dataverse/unf/6.0/unf-6.0.pom.md5 b/local_lib/org/dataverse/unf/6.0/unf-6.0.pom.md5
deleted file mode 100644
index 138bc9c95f6..00000000000
--- a/local_lib/org/dataverse/unf/6.0/unf-6.0.pom.md5
+++ /dev/null
@@ -1 +0,0 @@
-230c5b1f5ae71bb2fe80ef9e7209f681
diff --git a/local_lib/org/dataverse/unf/6.0/unf-6.0.pom.sha1 b/local_lib/org/dataverse/unf/6.0/unf-6.0.pom.sha1
deleted file mode 100644
index 689e8045418..00000000000
--- a/local_lib/org/dataverse/unf/6.0/unf-6.0.pom.sha1
+++ /dev/null
@@ -1 +0,0 @@
-286b819f2fc7432a94b5940c6171be1589f66a37
diff --git a/modules/container-base/src/main/docker/Dockerfile b/modules/container-base/src/main/docker/Dockerfile
index bbd02a14328..97aa4cd2792 100644
--- a/modules/container-base/src/main/docker/Dockerfile
+++ b/modules/container-base/src/main/docker/Dockerfile
@@ -164,6 +164,8 @@ RUN <<EOF
     ${ASADMIN} create-jvm-options '-XX\:MetaspaceSize=${ENV=MEM_METASPACE_SIZE}'
     ${ASADMIN} create-jvm-options '-XX\:MaxMetaspaceSize=${ENV=MEM_MAX_METASPACE_SIZE}'
     ${ASADMIN} create-jvm-options '-XX\:+IgnoreUnrecognizedVMOptions'
+    # Workaround for FISH-7722: Failed to deploy war with @Stateless https://github.com/payara/Payara/issues/6337
+    ${ASADMIN} create-jvm-options --add-opens=java.base/java.io=ALL-UNNAMED
     # Disable autodeploy and hot reload
     ${ASADMIN} set configs.config.server-config.admin-service.das-config.dynamic-reload-enabled="false"
     ${ASADMIN} set configs.config.server-config.admin-service.das-config.autodeploy-enabled="false"
@@ -206,7 +208,7 @@ RUN <<EOF
     # Stop domain
     ${ASADMIN} stop-domain "${DOMAIN_NAME}"
     # Disable JSP servlet dynamic reloads
-    sed -i 's#<servlet-class>org.apache.jasper.servlet.JspServlet</servlet-class>#<servlet-class>org.apache.jasper.servlet.JspServlet</servlet-class>\n    <init-param>\n      <param-name>development</param-name>\n      <param-value>false</param-value>\n    </init-param>\n    <init-param>\n      <param-name>genStrAsCharArray</param-name>\n      <param-value>true</param-value>\n    </init-param>#' "${DOMAIN_DIR}/config/default-web.xml"
+    sed -i 's#<servlet-class>org.glassfish.wasp.servlet.JspServlet</servlet-class>#<servlet-class>org.glassfish.wasp.servlet.JspServlet</servlet-class>\n    <init-param>\n      <param-name>development</param-name>\n      <param-value>false</param-value>\n    </init-param>\n    <init-param>\n      <param-name>genStrAsCharArray</param-name>\n      <param-value>true</param-value>\n    </init-param>#' "${DOMAIN_DIR}/config/default-web.xml"
     # Cleanup old CA certificates to avoid unnecessary log clutter during startup
     ${SCRIPT_DIR}/removeExpiredCaCerts.sh
     # Delete generated files
diff --git a/modules/container-base/src/main/docker/scripts/removeExpiredCaCerts.sh b/modules/container-base/src/main/docker/scripts/removeExpiredCaCerts.sh
index 205a9eda5d7..c019c09130e 100644
--- a/modules/container-base/src/main/docker/scripts/removeExpiredCaCerts.sh
+++ b/modules/container-base/src/main/docker/scripts/removeExpiredCaCerts.sh
@@ -8,6 +8,14 @@
 set -euo pipefail
 
 KEYSTORE="${DOMAIN_DIR}/config/cacerts.jks"
+if [ ! -r "${KEYSTORE}" ]; then
+  KEYSTORE="${DOMAIN_DIR}/config/cacerts.p12"
+  if [ ! -r "${KEYSTORE}" ]; then
+    echo "Could not find CA certs keystore"
+    exit 1
+  fi
+fi
+
 keytool -list -v -keystore "${KEYSTORE}" -storepass changeit 2>/dev/null | \
     grep -i 'alias\|until' > aliases.txt
 
diff --git a/modules/container-configbaker/Dockerfile b/modules/container-configbaker/Dockerfile
index cbda948db14..564216b3572 100644
--- a/modules/container-configbaker/Dockerfile
+++ b/modules/container-configbaker/Dockerfile
@@ -33,6 +33,7 @@ RUN chmod +x ${SCRIPT_DIR}/*.sh ${BOOTSTRAP_DIR}/*/*.sh
 # Copy the Solr config bits
 COPY --from=solr /opt/solr/server/solr/configsets/_default ${SOLR_TEMPLATE}/
 COPY maven/solr/*.xml ${SOLR_TEMPLATE}/conf/
+RUN rm ${SOLR_TEMPLATE}/conf/managed-schema.xml
 
 # Copy the data from scripts/api that provide the common base setup you'd get from the installer.
 # ".dockerignore" will take care of taking only the bare necessities
diff --git a/modules/container-configbaker/assembly.xml b/modules/container-configbaker/assembly.xml
index f5b309175ed..3285eef510a 100644
--- a/modules/container-configbaker/assembly.xml
+++ b/modules/container-configbaker/assembly.xml
@@ -8,7 +8,7 @@
         </fileSet>
         <!-- Get our custom Solr files -->
         <fileSet>
-            <directory>conf/solr/8.11.1</directory>
+            <directory>conf/solr/9.3.0</directory>
             <outputDirectory>solr</outputDirectory>
         </fileSet>
         <!-- Get the setup scripts from the installer (selected choice only) -->
@@ -43,4 +43,4 @@
             </excludes>
         </fileSet>
     </fileSets>
-</assembly>
\ No newline at end of file
+</assembly>
diff --git a/modules/dataverse-parent/pom.xml b/modules/dataverse-parent/pom.xml
index 05f7874d31c..c45d59e4f5f 100644
--- a/modules/dataverse-parent/pom.xml
+++ b/modules/dataverse-parent/pom.xml
@@ -131,9 +131,9 @@
  
     <properties>
         <!-- This is a special Maven property name, do not change! -->
-        <revision>5.14</revision>
+        <revision>6.0</revision>
     
-        <target.java.version>11</target.java.version>
+        <target.java.version>17</target.java.version>
         <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
         <additionalparam>-Xdoclint:none</additionalparam>
         <!-- Needed to avoid IDEA IDE compilation failures. See commits in GH #5059 -->
@@ -148,9 +148,9 @@
         <argLine>-Duser.timezone=${project.timezone} -Dfile.encoding=${project.build.sourceEncoding} -Duser.language=${project.language} -Duser.region=${project.region}</argLine>
     
         <!-- Major system components and dependencies -->
-        <payara.version>5.2022.3</payara.version>
-        <postgresql.version>42.5.1</postgresql.version>
-        <solr.version>8.11.1</solr.version>
+        <payara.version>6.2023.8</payara.version>
+        <postgresql.version>42.6.0</postgresql.version>
+        <solr.version>9.3.0</solr.version>
         <aws.version>1.12.290</aws.version>
         <google.cloud.version>0.177.0</google.cloud.version>
     
@@ -165,16 +165,14 @@
         <apache.httpcomponents.core.version>4.4.14</apache.httpcomponents.core.version>
         
         <!-- NEW gdcc XOAI library implementation -->
-        <gdcc.xoai.version>5.0.0</gdcc.xoai.version>
+        <gdcc.xoai.version>5.1.0</gdcc.xoai.version>
     
         <!-- Testing dependencies -->
         <testcontainers.version>1.15.0</testcontainers.version>
         <smallrye-mpconfig.version>2.10.1</smallrye-mpconfig.version>
-    
-        <junit.version>4.13.1</junit.version>
-        <junit.jupiter.version>5.7.0</junit.jupiter.version>
-        <junit.vintage.version>${junit.jupiter.version}</junit.vintage.version>
-        <mockito.version>2.28.2</mockito.version>
+        
+        <junit.jupiter.version>5.10.0</junit.jupiter.version>
+        <mockito.version>5.4.0</mockito.version>
         
         <checkstyle.version>9.3</checkstyle.version>
         
@@ -193,6 +191,7 @@
         <maven-source-plugin.version>3.2.1</maven-source-plugin.version>
         <maven-javadoc-plugin.version>3.4.1</maven-javadoc-plugin.version>
         <maven-flatten-plugin.version>1.3.0</maven-flatten-plugin.version>
+        <maven-enforcer-plugin.version>3.3.0</maven-enforcer-plugin.version>
         
         <maven-checkstyle-plugin.version>3.1.2</maven-checkstyle-plugin.version>
         <nexus-staging-plugin.version>1.6.13</nexus-staging-plugin.version>
@@ -255,6 +254,11 @@
                     <artifactId>maven-failsafe-plugin</artifactId>
                     <version>${maven-failsafe-plugin.version}</version>
                 </plugin>
+                <plugin>
+                    <groupId>org.apache.maven.plugins</groupId>
+                    <artifactId>maven-enforcer-plugin</artifactId>
+                    <version>${maven-enforcer-plugin.version}</version>
+                </plugin>
                 <plugin>
                     <groupId>org.apache.maven.plugins</groupId>
                     <artifactId>maven-checkstyle-plugin</artifactId>
@@ -314,6 +318,47 @@
                 </plugin>
             </plugins>
         </pluginManagement>
+        <plugins>
+            <plugin>
+                <groupId>org.apache.maven.plugins</groupId>
+                <artifactId>maven-enforcer-plugin</artifactId>
+                <executions>
+                    <execution>
+                        <id>no-junit4</id>
+                        <phase>generate-test-resources</phase>
+                        <goals>
+                            <goal>enforce</goal>
+                        </goals>
+                        <configuration>
+                            <rules>
+                                <bannedDependencies>
+                                    <excludes>
+                                        <exclude>junit:junit:*:*:test</exclude>
+                                        <exclude>org.junit:junit:*:*:test</exclude>
+                                        <exclude>org.junit.vintage:*:*:*:test</exclude>
+                                    </excludes>
+                                </bannedDependencies>
+                            </rules>
+                        </configuration>
+                    </execution>
+                    <execution>
+                        <id>general-reqs</id>
+                        <goals>
+                            <goal>enforce</goal>
+                        </goals>
+                        <phase>initialize</phase>
+                        <configuration>
+                            <rules>
+                                <banDuplicatePomDependencyVersions/>
+                                <requireJavaVersion>
+                                    <version>[${target.java.version}.0,)</version>
+                                </requireJavaVersion>
+                            </rules>
+                        </configuration>
+                    </execution>
+                </executions>
+            </plugin>
+        </plugins>
     </build>
     
     <!--Maven checks for dependencies from these repos in the order shown in the pom.xml
@@ -369,15 +414,20 @@
             <name>Unidata All</name>
             <url>https://artifacts.unidata.ucar.edu/repository/unidata-all/</url>
         </repository>
-        <repository>
-            <id>dvn.private</id>
-            <name>Local repository for hosting jars not available from network repositories.</name>
-            <url>file://${project.basedir}/local_lib</url>
-        </repository>
-        <!-- Uncomment when using snapshot releases from Maven Central 
+        <!-- Uncomment when using snapshot releases from Maven Central -->
         <repository>
             <id>oss-sonatype</id>
             <name>oss-sonatype</name>
+            <url>
+                https://oss.sonatype.org/content/repositories/snapshots/
+            </url>
+            <snapshots>
+                <enabled>true</enabled>
+            </snapshots>
+        </repository>
+        <repository>
+            <id>s01-oss-sonatype</id>
+            <name>s01-oss-sonatype</name>
             <url>
                 https://s01.oss.sonatype.org/content/repositories/snapshots/
             </url>
@@ -385,7 +435,7 @@
                 <enabled>true</enabled>
             </snapshots>
         </repository>
-        -->
+        <!-- -->
     </repositories>
     
     <profiles>
@@ -393,11 +443,9 @@
             <id>ct</id>
             <properties>
                 <!--
-                    Payara 5.2022.3 has problems with postboot deployment scripts.
-                    Fixed in this release, see https://github.com/payara/Payara/pull/5991
-                    Payara 5.2022.4 has security issues.
+                    With moving to Payara 6, we are aligned with containers and classic install again.
                 -->
-                <payara.version>5.2022.5</payara.version>
+                <!-- <payara.version>5.2022.5</payara.version> -->
             </properties>
     
             <build>
diff --git a/modules/dataverse-spi/pom.xml b/modules/dataverse-spi/pom.xml
index 6235d309e89..b00053fe5e0 100644
--- a/modules/dataverse-spi/pom.xml
+++ b/modules/dataverse-spi/pom.xml
@@ -13,7 +13,7 @@
     
     <groupId>io.gdcc</groupId>
     <artifactId>dataverse-spi</artifactId>
-    <version>1.0.0${project.version.suffix}</version>
+    <version>2.0.0${project.version.suffix}</version>
     <packaging>jar</packaging>
     
     <name>Dataverse SPI Plugin API</name>
diff --git a/modules/dataverse-spi/src/main/java/io/gdcc/spi/export/ExportDataProvider.java b/modules/dataverse-spi/src/main/java/io/gdcc/spi/export/ExportDataProvider.java
index 228992c8288..d039ac39e8f 100644
--- a/modules/dataverse-spi/src/main/java/io/gdcc/spi/export/ExportDataProvider.java
+++ b/modules/dataverse-spi/src/main/java/io/gdcc/spi/export/ExportDataProvider.java
@@ -3,8 +3,8 @@
 import java.io.InputStream;
 import java.util.Optional;
 
-import javax.json.JsonArray;
-import javax.json.JsonObject;
+import jakarta.json.JsonArray;
+import jakarta.json.JsonObject;
 
 /**
  * Provides all the metadata Dataverse has about a given dataset that can then
diff --git a/modules/dataverse-spi/src/main/java/io/gdcc/spi/export/XMLExporter.java b/modules/dataverse-spi/src/main/java/io/gdcc/spi/export/XMLExporter.java
index 9afe7ba1cfd..3c3fa35c69d 100644
--- a/modules/dataverse-spi/src/main/java/io/gdcc/spi/export/XMLExporter.java
+++ b/modules/dataverse-spi/src/main/java/io/gdcc/spi/export/XMLExporter.java
@@ -1,6 +1,6 @@
 package io.gdcc.spi.export;
 
-import javax.ws.rs.core.MediaType;
+import jakarta.ws.rs.core.MediaType;
 
 /**
  * XML Exporter is an extension of the base Exporter interface that adds the
diff --git a/pom.xml b/pom.xml
index 96f598af0f5..7ba22d2a076 100644
--- a/pom.xml
+++ b/pom.xml
@@ -26,7 +26,7 @@
         <packaging.type>war</packaging.type>
         
         <reload4j.version>1.2.18.4</reload4j.version>
-        <flyway.version>8.5.10</flyway.version>
+        <flyway.version>9.21.2</flyway.version>
         <jhove.version>1.20.1</jhove.version>
         <jacoco.version>0.8.7</jacoco.version>
         <poi.version>5.2.1</poi.version>
@@ -96,7 +96,7 @@
         <dependency>
             <groupId>io.gdcc</groupId>
             <artifactId>sword2-server</artifactId>
-            <version>1.2.1</version>
+            <version>2.0.0</version>
         </dependency>
         <!-- Dependency to use sword2-server in our codebase -->
         <dependency>
@@ -119,7 +119,7 @@
         <dependency>
           <groupId>com.apicatalog</groupId>
           <artifactId>titanium-json-ld</artifactId>
-          <version>1.3.0-SNAPSHOT</version>
+          <version>1.3.2</version>
         </dependency>
         <dependency>
             <groupId>com.google.code.gson</groupId>
@@ -160,12 +160,20 @@
             <artifactId>flyway-core</artifactId>
             <version>${flyway.version}</version>
         </dependency>
+        <!-- Enable resolution of the JPA provider in persistence.xml -->
+        <dependency>
+            <groupId>org.eclipse.persistence</groupId>
+            <artifactId>org.eclipse.persistence.jpa</artifactId>
+            <scope>provided</scope>
+        </dependency>
         <dependency>
             <groupId>com.google.guava</groupId>
             <artifactId>guava</artifactId>
-            <version>29.0-jre</version>
+            <version>32.1.2-jre</version>
             <type>jar</type>
         </dependency>
+        
+        <!-- Jakarta EE & Eclipse MicroProfile base dependencies -->
         <dependency>
             <groupId>org.eclipse.microprofile.config</groupId>
             <artifactId>microprofile-config-api</artifactId>
@@ -174,13 +182,14 @@
         <dependency>
             <groupId>jakarta.platform</groupId>
             <artifactId>jakarta.jakartaee-api</artifactId>
-            <version>${jakartaee-api.version}</version>
             <scope>provided</scope>
         </dependency>
-        <!-- JSON-P -->
+        
+        <!-- Jakarta Activation, MIME support etc -->
+        <!-- Runtime implementation here only, as necessary for testing. -->
         <dependency>
-            <groupId>org.glassfish</groupId>
-            <artifactId>jakarta.json</artifactId>
+            <groupId>org.eclipse.angus</groupId>
+            <artifactId>angus-activation</artifactId>
             <scope>provided</scope>
             <!-- no version here as managed by Payara BOM above! -->
         </dependency>
@@ -188,12 +197,20 @@
             <groupId>fish.payara.api</groupId>
             <artifactId>payara-api</artifactId>
             <scope>provided</scope>
+            <!-- Note: The version was provided by Payara BOM till 6.2023.7, when they removed the Core BOM from it as
+                       meant for internal use only. Simply referencing the version property here solves the problem. -->
+            <version>${payara.version}</version>
         </dependency>
+        
+        <!-- JSON-P -->
+        <!-- Runtime implementation here only, as necessary for testing. -->
         <dependency>
-            <groupId>com.sun.mail</groupId>
-            <artifactId>jakarta.mail</artifactId>
+            <groupId>org.eclipse.parsson</groupId>
+            <artifactId>jakarta.json</artifactId>
             <scope>provided</scope>
         </dependency>
+        
+        <!-- Jakarta Faces & related -->
         <dependency>
             <groupId>org.glassfish</groupId>
             <artifactId>jakarta.faces</artifactId>
@@ -203,6 +220,7 @@
             <groupId>org.primefaces</groupId>
             <artifactId>primefaces</artifactId>
             <version>11.0.0</version>
+            <classifier>jakarta</classifier>
         </dependency>
         <dependency>
             <groupId>org.primefaces.themes</groupId>
@@ -212,9 +230,10 @@
         <dependency>
             <groupId>org.omnifaces</groupId>
             <artifactId>omnifaces</artifactId>
-            <version>3.8</version> <!-- Or 1.8-SNAPSHOT -->
+            <version>4.0-M13</version>
         </dependency>
         
+        <!-- Jakarta Validation API & runtime -->
         <dependency>
             <groupId>jakarta.validation</groupId>
             <artifactId>jakarta.validation-api</artifactId>
@@ -225,9 +244,12 @@
             <artifactId>hibernate-validator</artifactId>
             <scope>provided</scope>
         </dependency>
+    
+        <!-- Jakarta Expression Language -->
+        <!-- Runtime implementation here only, as necessary for testing. -->
         <dependency>
-            <groupId>org.glassfish</groupId>
-            <artifactId>jakarta.el</artifactId>
+            <groupId>org.glassfish.expressly</groupId>
+            <artifactId>expressly</artifactId>
             <scope>provided</scope>
         </dependency>
         
@@ -261,29 +283,23 @@
         <dependency>
             <groupId>org.apache.solr</groupId>
             <artifactId>solr-solrj</artifactId>
-            <version>8.11.1</version>
+            <version>9.3.0</version>
         </dependency>
         <dependency>
             <groupId>colt</groupId>
             <artifactId>colt</artifactId>
             <version>1.2.0</version>
         </dependency>
-        <!-- fits.jar, not available from network repos, supplied in local_lib -->
+        <!-- FITS -->
         <dependency>
-            <groupId>nom.tam.fits</groupId>
-            <artifactId>fits</artifactId>
-            <version>2012-10-25-generated</version>
+            <groupId>gov.nasa.gsfc.heasarc</groupId>
+            <artifactId>nom-tam-fits</artifactId>
+            <version>1.12.0</version>
         </dependency>
         <dependency>
             <groupId>net.handle</groupId>
-            <artifactId>handle</artifactId>
-            <version>8.1.1</version>
-        </dependency>
-        <!-- UNF v5 (buggy), (temporarily) added for testing ingest against DVN v3  - L.A. -->
-        <dependency>
-            <groupId>edu.harvard.iq.dvn</groupId>
-            <artifactId>unf5</artifactId>
-            <version>5.0</version>
+            <artifactId>handle-client</artifactId>
+            <version>9.3.1</version>
         </dependency>
         <!-- (new) UNF v6: -->
         <dependency>
@@ -343,18 +359,24 @@
         <dependency>
             <groupId>org.ocpsoft.rewrite</groupId>
             <artifactId>rewrite-servlet</artifactId>
-            <version>3.5.0.Final</version>
+            <version>6.0.0-SNAPSHOT</version>
         </dependency>
         <dependency>
             <groupId>org.ocpsoft.rewrite</groupId>
             <artifactId>rewrite-config-prettyfaces</artifactId>
-            <version>3.5.0.Final</version>
+            <version>6.0.0-SNAPSHOT</version>
         </dependency>
         <dependency>
             <groupId>edu.ucsb.nceas</groupId>
             <artifactId>ezid</artifactId>
             <version>1.0.0</version>
             <type>jar</type>
+            <exclusions>
+                <exclusion>
+                    <groupId>junit</groupId>
+                    <artifactId>junit</artifactId>
+                </exclusion>
+            </exclusions>
         </dependency>
         <dependency>
             <groupId>org.jsoup</groupId>
@@ -394,7 +416,7 @@
             <artifactId>oauth2-oidc-sdk</artifactId>
             <version>10.7.1</version>
         </dependency>
-        <!-- New and Improved GDCC XOAI library! --> 
+        <!-- New and Improved GDCC XOAI library! -->
         <dependency>
             <groupId>io.gdcc</groupId>
             <artifactId>xoai-data-provider</artifactId>
@@ -415,15 +437,13 @@
         </dependency>
         <!-- For API File Upload: 1 of 2 -->
         <dependency>
-            <groupId>org.glassfish.jersey.containers</groupId>
-            <artifactId>jersey-container-servlet</artifactId>
-            <version>2.23.2</version>
+            <groupId>org.glassfish.jersey.core</groupId>
+            <artifactId>jersey-server</artifactId>
         </dependency>
         <!-- For API File Upload: 2 of 2 -->
         <dependency>
             <groupId>org.glassfish.jersey.media</groupId>
             <artifactId>jersey-media-multipart</artifactId>
-            <version>2.23.2</version>
         </dependency>
         <dependency>
             <groupId>com.mashape.unirest</groupId>
@@ -513,7 +533,7 @@
         <dependency>
             <groupId>io.gdcc</groupId>
             <artifactId>dataverse-spi</artifactId>
-            <version>1.0.0</version>
+            <version>2.0.0</version>
         </dependency>
         <!-- TESTING DEPENDENCIES -->
         <dependency>
@@ -522,18 +542,6 @@
             <version>${junit.jupiter.version}</version>
             <scope>test</scope>
         </dependency>
-        <dependency>
-            <groupId>junit</groupId>
-            <artifactId>junit</artifactId>
-            <version>${junit.version}</version>
-            <scope>test</scope>
-        </dependency>
-        <dependency>
-            <groupId>org.junit.vintage</groupId>
-            <artifactId>junit-vintage-engine</artifactId>
-            <version>${junit.vintage.version}</version>
-            <scope>test</scope>
-        </dependency>
         <dependency>
             <groupId>org.hamcrest</groupId>
             <artifactId>hamcrest-library</artifactId>
@@ -553,9 +561,9 @@
             <scope>test</scope>
         </dependency>
         <dependency>
-            <groupId>com.jayway.restassured</groupId>
+            <groupId>io.rest-assured</groupId>
             <artifactId>rest-assured</artifactId>
-            <version>2.4.0</version>
+            <version>5.3.1</version>
             <scope>test</scope>
         </dependency>
         <dependency>
@@ -574,6 +582,12 @@
             <groupId>org.testcontainers</groupId>
             <artifactId>testcontainers</artifactId>
             <scope>test</scope>
+            <exclusions>
+                <exclusion>
+                    <groupId>junit</groupId>
+                    <artifactId>junit</artifactId>
+                </exclusion>
+            </exclusions>
         </dependency>
         <dependency>
             <groupId>org.testcontainers</groupId>
@@ -761,7 +775,7 @@
                 <activeByDefault>true</activeByDefault>
             </activation>
             <properties>
-                <testsToExclude>edu.harvard.iq.dataverse.NonEssentialTests</testsToExclude>
+                <testsToExclude>not-essential-unittests</testsToExclude>
             </properties>
         </profile>
         <profile>
diff --git a/scripts/dev/dev-rebuild.sh b/scripts/dev/dev-rebuild.sh
index 71857b14068..9eae195b135 100755
--- a/scripts/dev/dev-rebuild.sh
+++ b/scripts/dev/dev-rebuild.sh
@@ -1,8 +1,9 @@
 #!/bin/sh
-PAYARA_DIR=/usr/local/payara5
+PAYARA_DIR=/usr/local/payara6
 ASADMIN=$PAYARA_DIR/glassfish/bin/asadmin
 DB_NAME=dvndb
 DB_USER=dvnapp
+export PGPASSWORD=secret
 
 echo "Checking if there is a war file to undeploy..."
 LIST_APP=$($ASADMIN list-applications -t)
@@ -23,7 +24,7 @@ echo "Deleting ALL DATA FILES uploaded to Dataverse..."
 rm -rf $PAYARA_DIR/glassfish/domains/domain1/files
 
 echo "Terminating database sessions so we can drop the database..."
-psql -U postgres -c "
+psql -h localhost -U postgres -c "
 SELECT pg_terminate_backend(pg_stat_activity.pid)
 FROM pg_stat_activity
 WHERE pg_stat_activity.datname = '$DB_NAME'
@@ -31,14 +32,14 @@ WHERE pg_stat_activity.datname = '$DB_NAME'
 " template1
 
 echo "Dropping the database..."
-psql -U $DB_USER -c "DROP DATABASE \"$DB_NAME\"" template1
+psql -h localhost -U $DB_USER -c "DROP DATABASE \"$DB_NAME\"" template1
 echo $?
 
 echo "Clearing out data from Solr..."
-curl http://localhost:8983/solr/collection1/update/json?commit=true -H "Content-type: application/json" -X POST -d "{\"delete\": { \"query\":\"*:*\"}}"
+curl "http://localhost:8983/solr/collection1/update/json?commit=true" -H "Content-type: application/json" -X POST -d "{\"delete\": { \"query\":\"*:*\"}}"
 
 echo "Creating a new database..."
-psql -U $DB_USER -c "CREATE DATABASE \"$DB_NAME\" WITH OWNER = \"$DB_USER\"" template1
+psql -h localhost -U $DB_USER -c "CREATE DATABASE \"$DB_NAME\" WITH OWNER = \"$DB_USER\"" template1
 echo $?
 
 echo "Starting app server..."
@@ -53,7 +54,7 @@ cd scripts/api
 cd ../..
 
 echo "Creating SQL sequence..."
-psql -U $DB_USER $DB_NAME -f doc/sphinx-guides/source/_static/util/createsequence.sql
+psql -h localhost -U $DB_USER $DB_NAME -f doc/sphinx-guides/source/_static/util/createsequence.sql
 
 echo "Setting DOI provider to \"FAKE\"..." 
 curl http://localhost:8080/api/admin/settings/:DoiProvider -X PUT -d FAKE
diff --git a/scripts/installer/Makefile b/scripts/installer/Makefile
index d40d4d792ea..399bc65168a 100644
--- a/scripts/installer/Makefile
+++ b/scripts/installer/Makefile
@@ -7,7 +7,6 @@ JHOVE_SCHEMA=${INSTALLER_ZIP_DIR}/jhoveConfig.xsd
 SOLR_SCHEMA=${INSTALLER_ZIP_DIR}/schema.xml ${INSTALLER_ZIP_DIR}/update-fields.sh
 SOLR_CONFIG=${INSTALLER_ZIP_DIR}/solrconfig.xml
 PYTHON_FILES=${INSTALLER_ZIP_DIR}/README_python.txt ${INSTALLER_ZIP_DIR}/installConfig.py ${INSTALLER_ZIP_DIR}/installUtils.py ${INSTALLER_ZIP_DIR}/install.py ${INSTALLER_ZIP_DIR}/installAppServer.py ${INSTALLER_ZIP_DIR}/requirements.txt ${INSTALLER_ZIP_DIR}/default.config ${INSTALLER_ZIP_DIR}/interactive.config
-INSTALL_SCRIPT=${INSTALLER_ZIP_DIR}/install
 
 installer:	dvinstall.zip
 
@@ -56,13 +55,13 @@ ${JHOVE_SCHEMA}:	../../conf/jhove/jhoveConfig.xsd ${INSTALLER_ZIP_DIR}
 	@echo copying jhove schema file
 	/bin/cp ../../conf/jhove/jhoveConfig.xsd ${INSTALLER_ZIP_DIR}
 
-${SOLR_SCHEMA}:		../../conf/solr/8.11.1/schema.xml ../../conf/solr/8.11.1/update-fields.sh ${INSTALLER_ZIP_DIR}
+${SOLR_SCHEMA}:		../../conf/solr/9.3.0/schema.xml ../../conf/solr/9.3.0/update-fields.sh ${INSTALLER_ZIP_DIR}
 	@echo copying Solr schema file
-	/bin/cp ../../conf/solr/8.11.1/schema.xml ../../conf/solr/8.11.1/update-fields.sh ${INSTALLER_ZIP_DIR}
+	/bin/cp ../../conf/solr/9.3.0/schema.xml ../../conf/solr/9.3.0/update-fields.sh ${INSTALLER_ZIP_DIR}
 
-${SOLR_CONFIG}:		../../conf/solr/8.11.1/solrconfig.xml ${INSTALLER_ZIP_DIR}
+${SOLR_CONFIG}:		../../conf/solr/9.3.0/solrconfig.xml ${INSTALLER_ZIP_DIR}
 	@echo copying Solr config file
-	/bin/cp ../../conf/solr/8.11.1/solrconfig.xml ${INSTALLER_ZIP_DIR}
+	/bin/cp ../../conf/solr/9.3.0/solrconfig.xml ${INSTALLER_ZIP_DIR}
 
 ${PYTHON_FILES}: README_python.txt install.py installConfig.py installAppServer.py installUtils.py requirements.txt default.config interactive.config ${INSTALLER_ZIP_DIR}
 	@echo copying Python installer files
diff --git a/scripts/installer/README.txt b/scripts/installer/README.txt
index 350a17fc00c..c3ed8211082 100644
--- a/scripts/installer/README.txt
+++ b/scripts/installer/README.txt
@@ -1,42 +1 @@
-The installer script (install) can be run either by a developer (inside the source tree), or by an end-user installing the Dataverse. The latter will obtain the script as part of the distribution bundle; and they will be running it inside the unzipped bundle directory. 
-
-In the former (developer) case, the installer will be looking for the files it needs in the other directories in the source tree. 
-For example, the war file (once built) can be found in ../../target/. The name of the war file will be dataverse-{VERSION}.war, where
-{VERSION} is the version number of the Dataverse, obtained from the pom file (../../pom.xml). For example, as of writing this README.txt (July 2015) the war file is ../../target/dataverse-4.1.war/
-
-When building a distribution archive, the Makefile will pile all the files that the installer needs in one directory (./dvinstall here) and then zip it up. We upload the resulting zip bundle on github as the actual software release. This way the end user only gets the files they actually need to install the Dataverse app. So they can do so without pulling the entire source tree. 
-
-
-The installer script itself (the perl script ./install) knows to look for all these files in 2 places (for example, it will look for the war file in ../../target/; if it's not there, it'll assume this is a distribution bundle and look for it as ./dataverse.war)
-
-Here's the list of the files that the installer needs: 
-
-the war file:
-target/dataverse-{VERSION}.war
-
-and also:
-
-from scripts/installer (this directory):
-
-install
-glassfish-setup.sh
-
-from scripts/api:
-
-setup-all.sh
-setup-builtin-roles.sh
-setup-datasetfields.sh
-setup-dvs.sh
-setup-identity-providers.sh
-setup-users.sh
-data (the entire directory with all its contents)
-
-from conf/jhove:
-
-jhove.conf
-
-SOLR schema and config files, from conf/solr/8.11.1:
-
-schema.xml
-schema_dv_mdb_fields.xml
-solrconfig.xml
+See README_python.txt
diff --git a/scripts/installer/as-setup.sh b/scripts/installer/as-setup.sh
index 49ebce059d2..fc5b378cff5 100755
--- a/scripts/installer/as-setup.sh
+++ b/scripts/installer/as-setup.sh
@@ -56,15 +56,15 @@ function preliminary_setup()
 
   # avoid OutOfMemoryError: PermGen per http://eugenedvorkin.com/java-lang-outofmemoryerror-permgen-space-error-during-deployment-to-glassfish/
   #./asadmin $ASADMIN_OPTS list-jvm-options
-  # Note that these JVM options are different for Payara5 and Glassfish4:
+  # Note that these JVM options are different for Payara and Glassfish4:
   # old Glassfish4 options: (commented out)
   #./asadmin $ASADMIN_OPTS delete-jvm-options "-XX\:MaxPermSize=192m"
   #./asadmin $ASADMIN_OPTS create-jvm-options "-XX\:MaxPermSize=512m"
   #./asadmin $ASADMIN_OPTS create-jvm-options "-XX\:PermSize=256m"
-  # payara5 ships with the "-server" option already in domain.xml, so no need:
+  # Payara ships with the "-server" option already in domain.xml, so no need:
   #./asadmin $ASADMIN_OPTS delete-jvm-options -client
 
-  # new Payara5 options: (thanks to donsizemore@unc.edu)
+  # new Payara options: (thanks to donsizemore@unc.edu)
   ./asadmin $ASADMIN_OPTS create-jvm-options "-XX\:MaxMetaspaceSize=512m"
   ./asadmin $ASADMIN_OPTS create-jvm-options "-XX\:MetaspaceSize=256m"
   ./asadmin $ASADMIN_OPTS create-jvm-options "-Dfish.payara.classloading.delegate=false"
@@ -116,6 +116,9 @@ function preliminary_setup()
 
   ./asadmin $ASADMIN_OPTS create-jvm-options "-Ddataverse.timerServer=true"
 
+  # Workaround for FISH-7722: Failed to deploy war with @Stateless https://github.com/payara/Payara/issues/6337
+  ./asadmin $ASADMIN_OPTS create-jvm-options --add-opens=java.base/java.io=ALL-UNNAMED
+
   # enable comet support
   ./asadmin $ASADMIN_OPTS set server-config.network-config.protocols.protocol.http-listener-1.http.comet-support-enabled="true"
 
@@ -155,18 +158,18 @@ function final_setup(){
 
 if [ "$DOCKER_BUILD" = "true" ]
   then
-    FILES_DIR="/usr/local/payara5/glassfish/domains/domain1/files"
+    FILES_DIR="/usr/local/payara6/glassfish/domains/domain1/files"
     RSERVE_HOST="localhost"
     RSERVE_PORT="6311"
     RSERVE_USER="rserve"
     RSERVE_PASS="rserve"
     HOST_ADDRESS="localhost\:8080"
-    pushd /usr/local/payara5/glassfish/bin/
+    pushd /usr/local/payara6/glassfish/bin/
     ./asadmin start-domain domain1
     preliminary_setup
-    chmod -R 777 /usr/local/payara5/
-    rm -rf /usr/local/payara5/glassfish/domains/domain1/generated 
-    rm -rf /usr/local/payara5/glassfish/domains/domain1/applications
+    chmod -R 777 /usr/local/payara6/
+    rm -rf /usr/local/payara6/glassfish/domains/domain1/generated
+    rm -rf /usr/local/payara6/glassfish/domains/domain1/applications
     popd
     exit 0
 fi
@@ -276,7 +279,7 @@ if [ ! -d "$DOMAIN_DIR" ]
     exit 2
 fi
 
-echo "Setting up your app. server (Payara5) to support Dataverse"
+echo "Setting up your app. server (Payara) to support Dataverse"
 echo "Payara directory: "$GLASSFISH_ROOT
 echo "Domain directory:    "$DOMAIN_DIR
 
diff --git a/scripts/installer/default.config b/scripts/installer/default.config
index 312dd2cb2d8..8647cd02416 100644
--- a/scripts/installer/default.config
+++ b/scripts/installer/default.config
@@ -1,7 +1,7 @@
 [glassfish]
 HOST_DNS_ADDRESS = localhost
 GLASSFISH_USER = dataverse
-GLASSFISH_DIRECTORY = /usr/local/payara5
+GLASSFISH_DIRECTORY = /usr/local/payara6
 GLASSFISH_ADMIN_USER = admin
 GLASSFISH_ADMIN_PASSWORD = secret
 GLASSFISH_HEAP = 2048
diff --git a/scripts/installer/install b/scripts/installer/install
deleted file mode 100755
index 2208f014606..00000000000
--- a/scripts/installer/install
+++ /dev/null
@@ -1,1538 +0,0 @@
-#!/usr/bin/perl
-
-use strict;
-use warnings;
-use Getopt::Long;
-use Socket;
-use File::Copy;
-
-# command line options:
-
-my $verbose;
-my $postgresonly;
-my $hostname;
-my $gfuser;
-my $gfdir;
-my $mailserver;
-my $noninteractive;
-my $skipdatabasesetup;
-my $force;
-my $nogfpasswd;
-my $admin_email;
-
-my ($rez) = GetOptions(
-    #"length=i" => \$length,    # numeric
-    #"file=s"   => \$data,      # string
-    "verbose"      => \$verbose,
-    "pg_only"      => \$postgresonly,
-    "skip_db_setup" => \$skipdatabasesetup,
-    "hostname=s"   => \$hostname,
-    "gfuser=s"     => \$gfuser,
-    "gfdir=s"      => \$gfdir,
-    "mailserver=s" => \$mailserver,
-    "y|yes"        => \$noninteractive,
-    "f|force"      => \$force,
-    "nogfpasswd"   => \$nogfpasswd,
-    "admin_email=s" => \$admin_email,
-);
-
-# openshift/docker-specific - name of the "pod" executing the installer:
-my $pod_name = "";
-if (exists($ENV{'MY_POD_NAME'}))
-{
-    $pod_name = $ENV{'MY_POD_NAME'};
-}
-
-my $jq_exec_path = "";
-my $psql_exec_path = "";
-my $cwd;
-my $WARFILE_LOCATION = "dataverse.war";
-
-
-my @CONFIG_VARIABLES;
-
-if ($postgresonly) 
-{
-    @CONFIG_VARIABLES =
-      ( 'POSTGRES_SERVER', 'POSTGRES_PORT', 'POSTGRES_DATABASE', 'POSTGRES_USER', 'POSTGRES_PASSWORD', 'POSTGRES_ADMIN_PASSWORD' );
-
-} 
-else 
-{
-
-    @CONFIG_VARIABLES = (
-	'HOST_DNS_ADDRESS',
-	'GLASSFISH_USER',
-	'GLASSFISH_DIRECTORY',
-	'ADMIN_EMAIL',
-	'MAIL_SERVER',
-
-	'POSTGRES_SERVER',
-	'POSTGRES_PORT',
-	'POSTGRES_ADMIN_PASSWORD',
-	'POSTGRES_DATABASE',
-	'POSTGRES_USER',
-	'POSTGRES_PASSWORD',
-
-	'SOLR_LOCATION', 
-	
-	'RSERVE_HOST',
-	'RSERVE_PORT',
-	'RSERVE_USER',
-	'RSERVE_PASSWORD',
-
-	'DOI_USERNAME',
-	'DOI_PASSWORD',
-	'DOI_BASEURL',
-	'DOI_DATACITERESTAPIURL'
-
-	);
-}
-
-my %CONFIG_DEFAULTS; 
-
-&read_config_defaults("default.config");
-
-my %CONFIG_PROMPTS;
-my %CONFIG_COMMENTS; 
-
-&read_interactive_config_values("interactive.config");
-
-my $API_URL = "http://localhost:8080/api";
-
-# jodbc.postgresql.org recommends 4.2 for Java 8.
-# updated drivers may be obtained from
-#  https://jdbc.postgresql.org/download.html
-my $postgres_jdbc = "postgresql-42.2.12.jar";
-
-# 0. A few preliminary checks:
-
-# 0a. OS:
-
-my $uname_out = `uname -a`;
-
-my @uname_tokens = split( " ", $uname_out );
-
-my $WORKING_OS;
-if ( $uname_tokens[0] eq "Darwin" ) {
-    print "\nThis appears to be a MacOS X system; good.\n";
-    # TODO: check the OS version
-
-    $WORKING_OS = "MacOSX";
-}
-elsif ( $uname_tokens[0] eq "Linux" ) {
-    if ( -f "/etc/redhat-release" ) {
-        print "\nThis appears to be a RedHat system; good.\n";
-        $WORKING_OS = "RedHat";
-        # TODO: check the distro version
-    }
-    else {
-        print "\nThis appears to be a non-RedHat Linux system;\n";
-        print "this installation *may* succeed; but we're not making any promises!\n";
-        $WORKING_OS = "Linux";
-    }
-} else {
-    print "\nWARNING: This appears to be neither a Linux or MacOS X system!\n";
-    print "This installer script will most likely fail. Please refer to the\n";
-    print "DVN Installers Guide for more information.\n\n";
-
-    $WORKING_OS = "Unknown";
-
-    unless ($noninteractive) {
-        exit 0;
-    }
-
-    print "(Normally we would stop right there; but since the \"--yes\" option was specified, we'll attempt to continue)\n\n";
-
-}
-
-
-# 0b. host name:
-
-if ($hostname) {
-    $CONFIG_DEFAULTS{'HOST_DNS_ADDRESS'} = $hostname;
-} else {
-    my $hostname_from_cmdline = `hostname`;
-    chop $hostname_from_cmdline;
-
-    $CONFIG_DEFAULTS{'HOST_DNS_ADDRESS'} = $hostname_from_cmdline;
-}
-
-# 0c. check if there is the default.config file with the pre-set configuration values: 
-
-#  read default configuration values from tab separated file "default.config" if it exists
-#  moved after the $hostname_from_cmdline section to avoid excessively complicating the logic
-#  of command line argument, automatic selection, or config file.
-#
-#  NOTE: if the file contain any Postgres configuration (for example: "POSTGRES_USER   dvnApp")
-#  but an environmental variable with the same name exists - THE ENV. VAR WILL WIN! (don't ask)
-#  (actually this is to accommodate the openshift docker deployment scenario)
-
-sub trim { my $s = shift; $s =~ s/^\s+|\s+$//g; return $s };
-
-#my $config_default_file = "default.config";
-#
-#if ( -e $config_default_file )
-#{
-#	print("loading default configuration values from $config_default_file\n");
-#	open( my $inp_cfg, $config_default_file );
-#	while( my $ln = <$inp_cfg> )
-#	{
-#		my @xs = split('\t', $ln );
-#		if ( 2 == @xs )
-#		{
-#			my $k = $xs[0];
-#                        my $v = trim($xs[1]);
-##                        if (defined $ENV{$k} && ($k eq "POSTGRES_USER" || $k eq "POSTGRES_PASSWORD")) {
-##                            $v = $ENV{$k};
-##                        }
-##                        if (defined $ENV{'POSTGRESQL_ADMIN_PASSWORD'} && $k eq "POSTGRES_ADMIN_PASSWORD")   {
-##                            $v = $ENV{'POSTGRESQL_ADMIN_PASSWORD'};
-##                        }
-#                        $CONFIG_DEFAULTS{$k}=$v;
-#		}
-#	}
-#}
-#else
-#{
-#	print("using hard-coded default configuration values (no $config_default_file available)\n");
-#}
-
-# 0d. current OS user. (the first one we find wins)
-
-my $current_user = $ENV{LOGNAME} || $ENV{USER} || getpwuid($<);
-
-# if the username was specified on the command-line, it takes precendence:
-if ($gfuser) {
-    print "Using CLI-specified user $gfuser.\n";
-    $CONFIG_DEFAULTS{'GLASSFISH_USER'} = $gfuser;
-}
-
-
-if (!$CONFIG_DEFAULTS{'GLASSFISH_USER'} || !$noninteractive) {
-   $CONFIG_DEFAULTS{'GLASSFISH_USER'} = $current_user;
-   print "using $current_user.\n";
-}
-
-
-# prefer that we not install as root.
-unless ( $< != 0 ) {
-    print "####################################################################\n";
-    print "     It is recommended that this script not be run as root.\n";
-    print " Consider creating the service account \"dataverse\", giving it ownership\n";
-    print "  on the glassfish/domains/domain1/ and glassfish/lib/ directories,\n";
-    print "    along with the JVM-specified files.dir location, and designate\n";
-    print "    that account to launch and run the Application Server (Payara),\n";
-    print "           AND use that user account to run this installer.\n"; 
-    print "####################################################################\n";
-
-    unless ($noninteractive)
-    {
-	print "\nPress any key to continue, or ctrl-C to exit the installer...\n\n";
-        system "stty cbreak </dev/tty >/dev/tty 2>&1";
-        unless ($noninteractive) {
-            my $key = getc(STDIN);
-        }
-        system "stty -cbreak </dev/tty >/dev/tty 2>&1";
-	print "\n";
-    }
-}
-
-# ensure $gfuser exists or bail
-my $gfidcmd="id $CONFIG_DEFAULTS{'GLASSFISH_USER'} > /dev/null";
-my $gfreturncode=system($gfidcmd);
-if ($gfreturncode != 0) {
-   die "Couldn't find user $gfuser. Please ensure the account exists and is readable by the user running this installer.\n";
-}
-
-# 0e. the following 2 options can also be specified on the command line, and 
-# also take precedence over the default values that are hard-coded and/or 
-# provided in the default.config file:
-
-if ($mailserver) {
-    $CONFIG_DEFAULTS{'MAIL_SERVER'} = $mailserver;
-}
-
-if ($gfdir) {
-    $CONFIG_DEFAULTS{'GLASSFISH_DIRECTORY'} = $gfdir;
-}
-
-# 1. CHECK FOR SOME MANDATORY COMPONENTS (WAR FILE, ETC.)
-# since we can't do anything without these things in place, better check for 
-# them before we go into the interactive config mode. 
-# (skip if this is a database-only setup)
-
-unless ($postgresonly) 
-{
-# 1a. war file: 
-    print "\nChecking if the application .war file is available... ";
-
-# if this installer section is running out of the installer zip bundle directory,
-# the war file will be sitting right here, named "dataverse.war": 
-
-    $WARFILE_LOCATION = "dataverse.war"; 
-
-# but if it's not here, this is probably a personal development 
-# setup, so their build should be up in their source tree:
-
-    unless ( -f $WARFILE_LOCATION ) {
-	my $DATAVERSE_VERSION = "";
-	my $DATAVERSE_POM_FILE = "../../modules/dataverse-parent/pom.xml";
-	if ( -f $DATAVERSE_POM_FILE ) 
-	{
-	    open DPF, $DATAVERSE_POM_FILE; 
-	    my $pom_line;
-	    while ($pom_line=<DPF>)
-	    {
-		chop $pom_line;
-		if ($pom_line =~/^[ \t]*<revision>([0-9\.]+)<\/revision>/)
-		{
-		    $DATAVERSE_VERSION=$1;
-		    last;
-		}	    
-	    }
-	    close DPF;
-	    
-	    if ($DATAVERSE_VERSION ne "") {
-		$WARFILE_LOCATION = "../../target/dataverse-" . $DATAVERSE_VERSION . ".war";
-	    }
-	}
-    }
-
-# But, if the war file cannot be found in either of the 2
-# places - we'll just have to give up:
-
-    unless ( -f $WARFILE_LOCATION ) {
-	print "\nWARNING: Can't find the project .war file!\n";
-	print "\tAre you running the installer in the right directory?\n";
-	print "\tHave you built the war file?\n";
-	print "\t(if not, build the project and run the installer again)\n";
-	
-	exit 0;
-    }
-    print " Yes, it is!\n";
-
-
-# 1b. check and remember the working dir:
-    chomp( $cwd = `pwd` );
-
-# 1d. jq executable: 
-
-    my $sys_path = $ENV{'PATH'};
-    my @sys_path_dirs = split( ":", $sys_path );
-
-    if ( $pod_name ne "start-glassfish") # Why is that again? 
-    {
-	for my $sys_path_dir (@sys_path_dirs) {
-	    if ( -x $sys_path_dir . "/jq" ) {
-		$jq_exec_path = $sys_path_dir;
-		last;
-	    }
-	}
-	if ( $jq_exec_path eq "" ) {
-	    print STDERR "\nERROR: I haven't been able to find the jq command in your PATH! Please install it from http://stedolan.github.io/jq/\n";
-	    exit 1;
-
-	}
-    }
-
-}
-
-
-# 2. INTERACTIVE CONFIG SECTION: 
-
-print "\nWelcome to the Dataverse installer.\n";
-unless ($postgresonly) {
-    print "You will be guided through the process of setting up a NEW\n";
-    print "instance of the dataverse application\n";
-}
-else {
-    print "You will be guided through the process of configuring your\n";
-    print "PostgreSQL database for use by the Dataverse application.\n";
-}
-
-my $yesno;
-
-unless ($noninteractive) 
-{
-    print "\nATTENTION: As of Dataverse v.4.19, we are offering a new, experimental \n";
-    print "version of the installer script, implemented in Python. It will eventually \n";
-    print "replace this script (implemented in Perl). Consult the file README_python.txt \n";
-    print "for more information on how to run it. \n";
-
-    print "\nWould you like to exit and use the new installer instead? [y/n] ";
-    $yesno = <>;
-    chop $yesno;
-
-    while ( $yesno ne "y" && $yesno ne "n" ) {
-	print "Please enter 'y' or 'n'!\n";
-	print "(or ctrl-C to exit the installer)\n";
-	$yesno = <>;
-	chop $yesno;
-    }
-
-    exit 0 if  $yesno eq "y";
-}
-
-ENTERCONFIG:
-
-print "\n";
-print "Please enter the following configuration values:\n";
-print "(hit [RETURN] to accept the default value)\n";
-print "\n";
-
-for my $ENTRY (@CONFIG_VARIABLES) 
-{
-    my $config_prompt = $CONFIG_PROMPTS{$ENTRY};
-    my $config_comment = $CONFIG_COMMENTS{$ENTRY};
-
-    if ( $config_comment eq '' ) 
-    {
-	print $config_prompt . ": ";
-	print "[" . $CONFIG_DEFAULTS{$ENTRY} . "] ";
-    }
-    else 
-    {
-	print $config_prompt . $config_comment;
-	print "[" . $CONFIG_DEFAULTS{$ENTRY} . "] ";
-    }
-
-    my $user_entry = "";
-
-    # ($noninteractive means the installer is being run in the non-interactive mode; it will use 
-    # the default values specified so far, without prompting the user for alternative values)\
-    unless ($noninteractive) 
-    {
-        $user_entry = <>;
-        chop $user_entry;
-
-	if ( $user_entry ne "" ) {
-	    $CONFIG_DEFAULTS{$ENTRY} = $user_entry;
-	}
-
-	# for some values, we'll try to do some validation right here, in real time:
-    
-	if ($ENTRY eq 'ADMIN_EMAIL') 
-	{
-	    $user_entry = $CONFIG_DEFAULTS{$ENTRY};
-	    my $attempts = 0; 
-	    while ($user_entry !~/[A-Za-z0-9._%+-]+@[A-Za-z0-9.-]+\.[A-Za-z]{2,4}/) 
-	    {
-		$attempts++;
-		print "Please enter a valid email address: ";
-		$user_entry = <>;
-		chop $user_entry;
-	    }
-
-	    if ($attempts) 
-	    {
-		print "OK, looks legit.\n";
-		$CONFIG_DEFAULTS{$ENTRY} = $user_entry;
-	    }
-	}
-	elsif ($ENTRY eq 'GLASSFISH_DIRECTORY') 
-	{
-            # CHECK IF GLASSFISH DIR LOOKS OK:
-	    print "\nChecking your Glassfish installation...";
-
-	    my $g_dir = $CONFIG_DEFAULTS{'GLASSFISH_DIRECTORY'};
-
-
-	    unless ( -d $g_dir . "/glassfish/domains/domain1" ) 
-	    {
-		while ( !( -d $g_dir . "/glassfish/domains/domain1" ) ) 
-		{
-		    print "\nInvalid Glassfish directory " . $g_dir . "!\n";
-		    print "Enter the root directory of your Glassfish installation:\n";
-		    print "(Or ctrl-C to exit the installer): ";
-
-		    $g_dir = <>;
-		    chop $g_dir;
-		}
-	    }
-
-	    # verify that we can write in the Glassfish directory 
-	    # (now that we are no longer requiring to run the installer as root)
-
-	    my @g_testdirs = ( "/glassfish/domains/domain1",
-			       "/glassfish/domains/domain1/config",
-			       "/glassfish/lib");
-
-	    for my $test_dir (@g_testdirs)
-	    {
-		if (!(-w ($g_dir . $test_dir))) 
-		{
-		    print "\n";
-		    die("ERROR: " . $g_dir . $test_dir . " not writable to the user running the installer! Check permissions on Payara5 hierarchy.\n");
-		}
-	    }
-
-
-
-	    print "$g_dir looks OK!\n";
-	    $CONFIG_DEFAULTS{'GLASSFISH_DIRECTORY'} = $g_dir; 
-
-	}
-	elsif ($ENTRY eq 'MAIL_SERVER')
-	{
-	    my $smtp_server = "";
-	    while (! &validate_smtp_server() )
-	    {
-		print "Enter a valid SMTP (mail) server:\n";
-		print "(Or ctrl-C to exit the installer): ";
-
-		$smtp_server = <>;
-		chop $smtp_server;
-
-		$CONFIG_DEFAULTS{'MAIL_SERVER'} = $smtp_server unless $smtp_server eq ''; 
-	    }
-
-	    print "\nOK, we were able to establish connection to the SMTP server you have specified.\n";
-	    print "Please note that you *may* need to configure some extra settings before your \n";
-	    print "Dataverse can send email. Please consult the \"Mail Host Configuration & Authentication\"\n";
-	    print "section of the installation guide (http://guides.dataverse.org/en/latest/installation/installation-main.html)\n";
-	    print "for more information.\n";
-	}
-    }
-
-    print "\n";
-}
-
-# 2b. CONFIRM VALUES ENTERED:
-
-print "\nOK, please confirm what you've entered:\n\n";
-
-for my $ENTRY (@CONFIG_VARIABLES) {
-    print $CONFIG_PROMPTS{$ENTRY} . ": " . $CONFIG_DEFAULTS{$ENTRY} . "\n";
-}
-
-if ($noninteractive) {
-    $yesno = "y";
-}
-else {
-    print "\nIs this correct? [y/n] ";
-    $yesno = <>;
-    chop $yesno;
-}
-
-while ( $yesno ne "y" && $yesno ne "n" ) {
-    print "Please enter 'y' or 'n'!\n";
-    print "(or ctrl-C to exit the installer)\n";
-    $yesno = <>;
-    chop $yesno;
-}
-
-if ( $yesno eq "n" ) {
-    goto ENTERCONFIG;
-}
-
-# 3. SET UP POSTGRES USER AND DATABASE
-
-unless($pod_name eq "start-glassfish" || $pod_name eq "dataverse-glassfish-0" || $skipdatabasesetup) {
-    &setup_postgres(); 
-# (there's no return code - if anything goes wrong, the method will exit the script, with some diagnostic messages for the user)
-    print "\nOK, done.\n";
-
-    if ($postgresonly) 
-    {
-	exit 0;
-    }
-}
-
-# 5. CONFIGURE PAYARA
-
-my $glassfish_dir = $CONFIG_DEFAULTS{'GLASSFISH_DIRECTORY'};
-
-my $done = &setup_appserver();
-
-# Check if the App is running: 
-
-unless ((
-     my $exit_code =
-     system( $glassfish_dir . "/bin/asadmin list-applications | grep -q '^dataverse'" )
-	) == 0 )
-{
-    # If the "asadmin list-applications" has failed, it may only mean that an earlier
-    # "asadmin login" had failed, and asadmin is now failing to run without the user
-    # supplying the username and password. (And the fact that we are trying to pile the  
-    # output to grep prevents it from providing the prompts). 
-    # So before we give up, we'll try an alternative: 
-
-    unless ((
-	my $exit_code_2 =
-	system( "curl http://localhost:8080/robots.txt | grep -q '^User-agent'" )
-	    ) == 0 )
-    {
-	print STDERR "It appears that the Dataverse application is not running...\n";
-	print STDERR "Even though the \"asadmin deploy\" command had succeeded earlier.\n\n";
-	print STDERR "Aborting - sorry...\n\n";
-
-	exit 1; 
-    }
-}
-
-
-print "\nOK, the Dataverse application appears to be running...\n\n";
-
-# Run the additional setup scripts, that populate the metadata block field values, create users
-# and dataverses, etc.
-
-unless ( -d "data" && -f "setup-datasetfields.sh" && -f "setup-users.sh" && -f "setup-dvs.sh" && -f "setup-all.sh" ) {
-    chdir("../api");
-}
-
-unless ( -d "data" && -f "setup-datasetfields.sh" && -f "setup-users.sh" && -f "setup-dvs.sh" && -f "setup-builtin-roles.sh" && -f "setup-all.sh" ) {
-    print "\nERROR: Can't find the metadata and user/dataverse setup scripts!\n";
-    print "\tAre you running the installer in the right directory?\n";
-    exit 1;
-}
-
-# if there's an admin_email set from arguments, replace the value in `dv-root.json` (called by `setup-all.sh`)
-if ($admin_email)
-{
-	print "setting contact email for root dataverse to: $admin_email\n";
-	set_root_contact_email( $admin_email );
-}
-else
-{
-	print "using default contact email for root dataverse\n";
-}
-
-for my $script ( "setup-all.sh" ) {
-    # (there's only 1 setup script to run now - it runs all the other required scripts)
-    print "Executing post-deployment setup script " . $script . "... ";
-
-    my $my_hostname = $CONFIG_DEFAULTS{'HOST_DNS_ADDRESS'};
-
-    # We used to filter the supplied scripts, replacing "localhost" and the port, in 
-    # case they are running Dataverse on a different port... Now we are simply requiring
-    # that the port 8080 is still configured in domain.xml when they are running the 
-    # installer:
-    my $run_script;
-    #if ( $my_hostname ne "localhost" ) {
-    #    system( "sed 's/localhost:8080/$my_hostname/g' < " . $script . " > tmpscript.sh; chmod +x tmpscript.sh" );
-    #    $run_script = "tmpscript.sh";
-    #}
-    #else {
-    $run_script = $script;
-    #}
-
-    unless ( my $exit_code = system( "./" . $run_script . " > $run_script.$$.log 2>&1") == 0 ) 
-    {
-        print "\nERROR executing script " . $script . "!\n";
-        exit 1;
-    }
-    print "done!\n";
-}
-
-# SOME ADDITIONAL SETTINGS THAT ARE NOT TAKEN CARE OF BY THE setup-all SCRIPT 
-# NEED TO BE CONFIGURED HERE:
-
-print "Making additional configuration changes...\n\n";
-
-
-# a. Configure the Admin email in the Dataverse settings:
-
-print "Executing " . "curl -X PUT -d " . $CONFIG_DEFAULTS{'ADMIN_EMAIL'} . " " . $API_URL . "/admin/settings/:SystemEmail" . "\n";
-
-my $exit_code = system("curl -X PUT -d " . $CONFIG_DEFAULTS{'ADMIN_EMAIL'} . " " . $API_URL . "/admin/settings/:SystemEmail"); 
-if ( $exit_code )       
-{
-    print "WARNING: failed to configure the admin email in the Dataverse settings!\n\n";
-} 
-else 
-{
-    print "OK.\n\n";
-}
-    
-# b. If this installation is going to be using a remote SOLR search engine service, configure its location in the settings:
-
-if ($CONFIG_DEFAULTS{'SOLR_LOCATION'} ne 'LOCAL')
-{
-    print "Executing " . "curl -X PUT -d " . $CONFIG_DEFAULTS{'SOLR_LOCATION'} . " " . $API_URL . "/admin/settings/:SolrHostColonPort" . "\n";
-    my $exit_code = system("curl -X PUT -d " . $CONFIG_DEFAULTS{'SOLR_LOCATION'} . " " . $API_URL . "/admin/settings/:SolrHostColonPort"); 
-    if ( $exit_code )       
-    {
-	print "WARNING: failed to configure the location of the remote SOLR service!\n\n";
-    }
-    else 
-    {
-	print "OK.\n\n";
-    }
-}
-
-
-
-chdir($cwd);
-
-print "\n\nYou should now have a running Dataverse instance at\n";
-print "  http://" . $CONFIG_DEFAULTS{'HOST_DNS_ADDRESS'} . ":8080\n\n\n";
-
-if ($WARFILE_LOCATION =~/([0-9]\.[0-9]\.[0-9])\.war$/) 
-{
-    my $version = $1;
-    print "If this is a personal development installation, we recommend that you undeploy the currently-running copy \n"; 
-    print "of the application, with the following asadmin command:\n\n";
-    print "\t" . $CONFIG_DEFAULTS{'GLASSFISH_DIRECTORY'} . '/bin/asadmin undeploy dataverse-' . $version . "\n\n";
-    print "before attempting to deploy from your development environment in NetBeans.\n\n";
-}
-    
-
-print "\nYour Dataverse has been configured to use DataCite, to register DOI global identifiers in the \n";
-print "test name space \"10.5072\" with the \"shoulder\" \"FK2\"\n";
-print "However, you have to contact DataCite (support\@datacite.org) and request a test account, before you \n";
-print "can publish datasets. Once you receive the account name and password, add them to your domain.xml,\n";
-print "as the following two JVM options:\n";
-print "\t<jvm-options>-Ddoi.username=...</jvm-options>\n";
-print "\t<jvm-options>-Ddoi.password=...</jvm-options>\n";
-print "and restart payara5\n";
-print "If this is a production Dataverse and you are planning to register datasets as \n";
-print "\"real\", non-test DOIs or Handles, consult the \"Persistent Identifiers and Publishing Datasets\"\n";
-print "section of the Installataion guide, on how to configure your Dataverse with the proper registration\n";
-print "credentials.\n\n";
-
-
-
-# (going to skip the Rserve check; it's no longer a required, or even a recommended component)
-
-exit 0;
-
-# 9. FINALLY, CHECK IF RSERVE IS RUNNING:
-print "\n\nFinally, checking if Rserve is running and accessible...\n";
-
-unless ( $CONFIG_DEFAULTS{'RSERVE_PORT'} =~ /^[0-9][0-9]*$/ ) {
-    print $CONFIG_DEFAULTS{'RSERVE_HOST'} . " does not look like a valid port number,\n";
-    print "defaulting to 6311.\n\n";
-
-    $CONFIG_DEFAULTS{'RSERVE_PORT'} = 6311;
-}
-
-my ( $rserve_iaddr, $rserve_paddr, $rserve_proto );
-
-unless ( $rserve_iaddr = inet_aton( $CONFIG_DEFAULTS{'RSERVE_HOST'} ) ) {
-    print STDERR "Could not look up $CONFIG_DEFAULTS{'RSERVE_HOST'},\n";
-    print STDERR "the host you specified as your R server.\n";
-    print STDERR "\nDVN can function without a working R server, but\n";
-    print STDERR "much of the functionality concerning running statistics\n";
-    print STDERR "and analysis on quantitative data will not be available.\n";
-    print STDERR "Please consult the Installers guide for more info.\n";
-
-    exit 0;
-}
-
-$rserve_paddr = sockaddr_in( $CONFIG_DEFAULTS{'RSERVE_PORT'}, $rserve_iaddr );
-$rserve_proto = getprotobyname('tcp');
-
-unless ( socket( SOCK, PF_INET, SOCK_STREAM, $rserve_proto )
-    && connect( SOCK, $rserve_paddr ) )
-{
-    print STDERR "Could not establish connection to $CONFIG_DEFAULTS{'RSERVE_HOST'}\n";
-    print STDERR "on port $CONFIG_DEFAULTS{'RSERVE_PORT'}, the address you provided\n";
-    print STDERR "for your R server.\n";
-    print STDERR "DVN can function without a working R server, but\n";
-    print STDERR "much of the functionality concerning running statistics\n";
-    print STDERR "and analysis on quantitative data will not be available.\n";
-    print STDERR "Please consult the \"Installing R\" section in the Installers guide\n";
-    print STDERR "for more info.\n";
-
-    exit 0;
-
-}
-
-close(SOCK);
-print "\nOK!\n";
-
-# 5. CONFIGURE PAYARA
-sub setup_appserver {
-    my $success = 1;
-    my $failure = 0;
-
-    my $glassfish_dir = $CONFIG_DEFAULTS{'GLASSFISH_DIRECTORY'};
-
-    print "\nProceeding with the app. server (Payara5) setup.\n";
-
-# 5a. DETERMINE HOW MUCH MEMORY TO GIVE TO GLASSFISH AS HEAP:
-
-    my $gf_heap_default = "2048m";
-    my $sys_mem_total   = 0;
-
-    if ( -e "/proc/meminfo" && open MEMINFO, "/proc/meminfo" ) {
-	# Linux
-
-	while ( my $mline = <MEMINFO> ) {
-	    if ( $mline =~ /MemTotal:[ \t]*([0-9]*) kB/ ) {
-		$sys_mem_total = $1;
-	    }
-	}
-	
-	close MEMINFO;
-
-# TODO: Figure out how to determine the amount of memory when running in Docker
-# because we're wondering if Dataverse can run in the free OpenShift Online
-# offering that only gives you 1 GB of memory. Obviously, if this is someone's
-# first impression of Dataverse, we want to to run well! What if you try to
-# ingest a large file or perform other memory-intensive operations? For more
-# context, see https://github.com/IQSS/dataverse/issues/4040#issuecomment-331282286
-	if ( -e "/sys/fs/cgroup/memory/memory.limit_in_bytes" && open CGROUPMEM, "/sys/fs/cgroup/memory/memory.limit_in_bytes" ) {
-	    print "INFO: This system has the CGROUP file /sys/fs/cgroup/memory/memory.limit_in_bytes\n";
-	    while ( my $limitline = <CGROUPMEM> ) {
-		### TODO: NO, WE ARE NOT NECESSARILY IN DOCKER!
-		###print "We must be running in Docker! Fancy!\n";
-		# The goal of this cgroup check is for
-		# "Setting the heap limit for Glassfish/Payara to 750MB"
-		# to change to some other value, based on memory available.
-		print "INFO: /sys/fs/cgroup/memory/memory.limit_in_bytes: $limitline\n";
-		my $limit_in_kb = $limitline / 1024;
-		print "INFO: CGROUP limit_in_kb =  $limit_in_kb [ignoring]\n";
-		# In openshift.json, notice how PostgreSQL and Solr have
-		# resources.limits.memory set to "256Mi".
-		# If you try to give the Dataverse/Glassfish container twice
-		# as much memory (512 MB) and allow $sys_mem_total to
-		# be set below, you should see the following:
-		# "Setting the heap limit for Glassfish to 192MB."
-		# FIXME: dataverse.war will not deploy with only 512 MB of memory.
-		# Again, the goal is 1 GB total (512MB + 256MB + 256MB) for
-		# Glassfish, PostgreSQL, and Solr to fit in the free OpenShift tier.
-		#print "setting sys_mem_total to: $limit_in_kb\n";
-		#$sys_mem_total = $limit_in_kb;
-	    }
-	    close CGROUPMEM;
-	}
-    }
-    elsif ( -x "/usr/sbin/sysctl" ) 
-    {
-	# MacOS X, probably...
-
-	$sys_mem_total = `/usr/sbin/sysctl -n hw.memsize`;
-	chop $sys_mem_total;
-	if ( $sys_mem_total > 0 ) {
-	    $sys_mem_total = int( $sys_mem_total / 1024 );
-	    # size in kb
-	}
-    }
-
-    if ( $sys_mem_total > 0 ) {
-	# setting the default heap size limit to 3/8 of the available
-	# amount of memory:
-	$gf_heap_default = ( int( $sys_mem_total / ( 8 / 3 * 1024 ) ) );
-
-	print "\nSetting the heap limit for Payara5 to " . $gf_heap_default . "MB. \n";
-	print "You may need to adjust this setting to better suit \n";
-	print "your system.\n\n";
-
-	#$gf_heap_default .= "m";
-
-    }
-    else 
-    {
-	print "\nCould not determine the amount of memory on your system.\n";
-	print "Setting the heap limit for Payara5 to 2GB. You may need \n";
-	print "to  adjust the value to better suit your system.\n\n";
-    }
-
-    push @CONFIG_VARIABLES, "DEF_MEM_SIZE";
-    $CONFIG_DEFAULTS{"DEF_MEM_SIZE"} = $gf_heap_default;
-
-# TODO:
-# is the below still the case with Payara5? 
-# if the system has more than 4GB of memory (I believe), glassfish must
-# be run with the 64 bit flag set explicitly (at least that was the case
-# with the MacOS glassfish build...). Verify, and if still the case,
-# add a check.
-
-    print "\n*********************\n";
-    print "PLEASE NOTE, SOME OF THE ASADMIN COMMANDS ARE GOING TO FAIL,\n";
-    print "FOR EXAMPLE, IF A CONFIGURATION SETTING THAT WE ARE TRYING\n";
-    print "TO CREATE ALREADY EXISTS; OR IF A JVM OPTION THAT WE ARE\n";
-    print "DELETING DOESN'T. THESE \"FAILURES\" ARE NORMAL!\n";
-    print "*********************\n\n";
-    print "When/if asadmin asks you to \"Enter admin user name\",\n";
-    print "it should be safe to hit return and accept the default\n";
-    print "(which is \"admin\").\n";
-
-    print "\nPress any key to continue...\n\n";
-    
-    unless ($noninteractive)
-    {
-	system "stty cbreak </dev/tty >/dev/tty 2>&1";
-	unless ($noninteractive) {
-	    my $key = getc(STDIN);
-	}
-	system "stty -cbreak </dev/tty >/dev/tty 2>&1";
-    }
-	
-    print "\n";
-
-# 5b. start domain, if not running:
-    
-    my $javacheck = `java -version`;
-    my $exitcode  = $?;
-    unless ( $exitcode == 0 ) {
-	print STDERR "$javacheck\n" if $javacheck;
-	print STDERR "Do you have java installed?\n";
-	exit 1;
-    }
-    my $DOMAIN = "domain1";
-    my $DOMAIN_DOWN =
-	`$CONFIG_DEFAULTS{'GLASSFISH_DIRECTORY'}/bin/asadmin list-domains | grep "$DOMAIN " | grep "not running"`;
-    print STDERR $DOMAIN_DOWN . "\n";
-    if ($DOMAIN_DOWN) {
-	print "Trying to start domain up...\n";
-	if ( $current_user eq $CONFIG_DEFAULTS{'GLASSFISH_USER'} ){
-		system( $CONFIG_DEFAULTS{'GLASSFISH_DIRECTORY'} . "/bin/asadmin start-domain domain1" );
-	}
-	else
-	{
-		system( "sudo -u $CONFIG_DEFAULTS{'GLASSFISH_USER'} " . $CONFIG_DEFAULTS{'GLASSFISH_DIRECTORY'} . "/bin/asadmin start-domain domain1" );
-	}
-	# TODO: (?) - retest that the domain is running now? 
-    }
-    else
-    {
-	print "domain appears to be up...\n";
-    }
-
-# 5c. create asadmin login, so that the user doesn't have to enter
-# the username and password for every asadmin command, if
-# access to :4848 is password-protected:
-
-    system( $glassfish_dir. "/bin/asadmin login" );
-    
-# 5d. configure glassfish using ASADMIN commands:
-  
-    $success = &run_asadmin_script();
-
-# CHECK EXIT STATUS, BARF IF SETUP SCRIPT FAILED:
-
-    unless ($success) {
-	print "\nERROR! Failed to configure Payara5 domain!\n";
-	print "(see the error messages above - if any)\n";
-	print "Aborting...\n";
-	
-	exit 1;
-    }
-    
-# 5e. Additional config files:
-    
-    my $JHOVE_CONFIG = "jhove.conf";
-    my $JHOVE_CONF_SCHEMA = "jhoveConfig.xsd";
-    
-
-    my $JHOVE_CONFIG_DIST = $JHOVE_CONFIG; 
-    my $JHOVE_CONF_SCHEMA_DIST = $JHOVE_CONF_SCHEMA; 
-    
-# (if the installer is being run NOT as part of a distribution zipped bundle, but
-# from inside the source tree - adjust the locations of the jhove config files:
-
-    unless ( -f $JHOVE_CONFIG ) {
-	$JHOVE_CONFIG_DIST = "../../conf/jhove/jhove.conf";
-	$JHOVE_CONF_SCHEMA_DIST = "../../conf/jhove/jhoveConfig.xsd";
-    }
-
-# but if we can't find the files in either location, it must mean
-# that they are not running the script in the correct directory - so 
-# nothing else left for us to do but give up:
-
-    unless ( -f $JHOVE_CONFIG_DIST && -f $JHOVE_CONF_SCHEMA_DIST ) {
-	print "\nERROR! JHOVE configuration files not found in the config dir!\n";
-	print "(are you running the installer in the right directory?\n";
-	print "Aborting...\n";
-	exit 1;
-    }
-
-    print "\nCopying additional configuration files... ";
-
-    #system( "/bin/cp -f " . $JHOVE_CONF_SCHEMA_DIST . " " . $glassfish_dir . "/glassfish/domains/domain1/config" );
-    my $jhove_success = copy ($JHOVE_CONF_SCHEMA_DIST, $glassfish_dir . "/glassfish/domains/domain1/config");
-    unless ($jhove_success) 
-    {
-	print "\n*********************\n";
-	print "ERROR: failed to copy jhove config file into " . $glassfish_dir . "/glassfish/domains/domain1/config - do you have write permission in that directory?";
-	exit 1;
-    }
-
-# The JHOVE conf file has an absolute PATH of the JHOVE config schema file (uh, yeah...)
-# - so it may need to be readjusted here: 
-
-    if ( $glassfish_dir ne "/usr/local/payara5" )
-    {
-	system( "sed 's:/usr/local/payara5:$glassfish_dir:g' < " . $JHOVE_CONFIG_DIST . " > " . $glassfish_dir . "/glassfish/domains/domain1/config/" . $JHOVE_CONFIG);
-    }
-    else 
-    {
-	system( "/bin/cp -f " . $JHOVE_CONFIG_DIST . " " . $glassfish_dir . "/glassfish/domains/domain1/config" );
-    }
-
-    print "done!\n";
-    
-# 5f. check if payara is running:
-# TODO.
-    
-# 5g. DEPLOY THE APPLICATION:
-
-    print "\nAttempting to deploy the application.\n";
-    print "Command line: " . $glassfish_dir . "/bin/asadmin deploy " . $WARFILE_LOCATION . "\n";
-    unless ((
-	my $exit_code =
-	system( $glassfish_dir . "/bin/asadmin deploy " . $WARFILE_LOCATION )
-	    ) == 0 )
-    {
-	print STDERR "Failed to deploy the application! WAR file: " . $WARFILE_LOCATION . ".\n";
-	print STDERR "(exit code: " . $exit_code . ")\n";
-	print STDERR "Aborting.\n";
-	exit 1;
-    }
-
-
-    print "Finished configuring Payara and deploying the dataverse application.  \n";
-
-
-    return $success;
-}
-
-sub run_asadmin_script {
-    my $success = 1;
-    my $failure = 0;
-
-    # We are going to run a standalone shell script with a bunch of asadmin
-    # commands to set up all the Payara components for the application.
-    # All the parameters must be passed to that script as environmental
-    # variables:
-
-    $ENV{'GLASSFISH_ROOT'}   = $CONFIG_DEFAULTS{'GLASSFISH_DIRECTORY'};
-    $ENV{'GLASSFISH_DOMAIN'} = "domain1";
-    $ENV{'ASADMIN_OPTS'}     = "";
-    $ENV{'MEM_HEAP_SIZE'}    = $CONFIG_DEFAULTS{'DEF_MEM_SIZE'};
-
-    $ENV{'DB_PORT'} = $CONFIG_DEFAULTS{'POSTGRES_PORT'};
-    $ENV{'DB_HOST'} = $CONFIG_DEFAULTS{'POSTGRES_SERVER'};
-    $ENV{'DB_NAME'} = $CONFIG_DEFAULTS{'POSTGRES_DATABASE'};
-    $ENV{'DB_USER'} = $CONFIG_DEFAULTS{'POSTGRES_USER'};
-    $ENV{'DB_PASS'} = $CONFIG_DEFAULTS{'POSTGRES_PASSWORD'};
-
-    $ENV{'RSERVE_HOST'} = $CONFIG_DEFAULTS{'RSERVE_HOST'};
-    $ENV{'RSERVE_PORT'} = $CONFIG_DEFAULTS{'RSERVE_PORT'};
-    $ENV{'RSERVE_USER'} = $CONFIG_DEFAULTS{'RSERVE_USER'};
-    $ENV{'RSERVE_PASS'} = $CONFIG_DEFAULTS{'RSERVE_PASSWORD'};
-    $ENV{'DOI_BASEURL'} = $CONFIG_DEFAULTS{'DOI_BASEURL'};
-    $ENV{'DOI_USERNAME'} = $CONFIG_DEFAULTS{'DOI_USERNAME'};
-    $ENV{'DOI_PASSWORD'} = $CONFIG_DEFAULTS{'DOI_PASSWORD'};
-    $ENV{'DOI_DATACITERESTAPIURL'} = $CONFIG_DEFAULTS{'DOI_DATACITERESTAPIURL'};
-    
-    $ENV{'HOST_ADDRESS'} = $CONFIG_DEFAULTS{'HOST_DNS_ADDRESS'};
-
-    my ($mail_server_host, $mail_server_port) = split (":", $CONFIG_DEFAULTS{'MAIL_SERVER'});
-
-    $ENV{'SMTP_SERVER'}  = $mail_server_host;
-
-    if ($mail_server_port) 
-    {
-	$ENV{'SMTP_SERVER_PORT'} = $mail_server_port; 
-    }
-
-    $ENV{'FILES_DIR'} =
-      $CONFIG_DEFAULTS{'GLASSFISH_DIRECTORY'} . "/glassfish/domains/" . $ENV{'GLASSFISH_DOMAIN'} . "/files";
-    
-    system("./as-setup.sh");
-
-    if ($?) {
-        return $failure;
-    }
-    return $success;
-}
-
-sub create_pg_hash {
-    my $pg_username = shift @_;
-    my $pg_password = shift @_;
-
-    my $encode_line = $pg_password . $pg_username;
-
-    # for Redhat:
-
-    ##print STDERR "executing /bin/echo -n $encode_line | md5sum\n";
-
-    my $hash;
-    if ( $WORKING_OS eq "MacOSX" ) {
-        $hash = `/bin/echo -n $encode_line | md5`;
-    }
-    else {
-        $hash = `/bin/echo -n $encode_line | md5sum`;
-    }
-
-    chop $hash;
-
-    $hash =~ s/  \-$//;
-
-    if ( ( length($hash) != 32 ) || ( $hash !~ /^[0-9a-f]*$/ ) ) {
-        print STDERR "Failed to generate a MD5-encrypted password hash for the Postgres database.\n";
-        exit 1;
-    }
-
-    return $hash;
-}
-
-sub validate_smtp_server {
-    my ( $mail_server_iaddr, $mail_server__paddr, $mail_server_proto, $mail_server_status );
-
-    $mail_server_status = 1;
-
-    my $userentry = $CONFIG_DEFAULTS{'MAIL_SERVER'};
-    my ($testserver, $testport) = split (":", $userentry);
-
-    unless ( $mail_server_iaddr = inet_aton( $testserver ) ) {
-	print STDERR "Could not look up $testserver,\n";
-	print STDERR "the host you specified as your mail server\n";
-	$mail_server_status = 0;
-    }
-
-    if ($mail_server_status) {
-	$testport = 25 unless $testport; 
-	my $mail_server_paddr = sockaddr_in( $testport, $mail_server_iaddr );
-	$mail_server_proto = getprotobyname('tcp');
-
-       unless ( socket( SOCK, PF_INET, SOCK_STREAM, $mail_server_proto )
-		 && connect( SOCK, $mail_server_paddr ) )
-	{
-	    print STDERR "Could not establish connection to $CONFIG_DEFAULTS{'MAIL_SERVER'},\n";
-	    print STDERR "the address you provided for your Mail server.\n";
-	    print STDERR "Please select a valid mail server, and try again.\n\n";
-
-	    $mail_server_status = 0;
-	}
-
-	close(SOCK);
-    }
-
-    return $mail_server_status; 
-}
-
-# support function for set_root_contact_email
-sub search_replace_file
-{
-        my ($infile, $pattern, $replacement, $outfile) = @_;
-        open (my $inp, $infile);
-        local $/ = undef;
-        my $txt = <$inp>;
-        close $inp;
-        $txt =~s/$pattern/$replacement/g;
-        open (my $opf, '>:encoding(UTF-8)', $outfile);
-        print $opf $txt;
-        close $opf;
-        return;
-}
-# set the email address for the default `dataverseAdmin` account
-sub set_root_contact_email
-{
-        my ($contact_email) = @_;
-        my $config_json = "data/user-admin.json";
-        search_replace_file($config_json,"\"email\":\"dataverse\@mailinator.com\"","\"email\":\"$contact_email\"",$config_json);
-        return;
-}
-
-
-sub setup_postgres {
-    my $pg_local_connection = 0;
-    my $pg_major_version = 0;
-    my $pg_minor_version = 0;
-
-
-# We'll need to do a few things as the Postgres admin user; 
-# We'll assume the name of the admin user is "postgres". 
-    my $POSTGRES_ADMIN_USER = "postgres";
-
-
-
-##Handling container env
-
-    if ($pod_name eq "start-glassfish")
-    {
-	# When we are in this openshift "start-glassfish" pod, we get all the 
-	# Postgres configuration from the environmental variables. 
-	print "Init container starting \n";
-	$CONFIG_DEFAULTS{'POSTGRES_SERVER'} = $ENV{"POSTGRES_SERVER"} . "." .  $ENV{"POSTGRES_SERVICE_HOST"};
-	$CONFIG_DEFAULTS{'POSTGRES_DATABASE'} = $ENV{"POSTGRES_DATABASE"};
-	$CONFIG_DEFAULTS{'POSTGRES_USER'} = $ENV{"POSTGRES_USER"};
-	$CONFIG_DEFAULTS{'POSTGRES_ADMIN_PASSWORD'} = $ENV{"POSTGRES_ADMIN_PASSWORD"};
-	# there was a weird case of the postgres admin password option spelled differently in openshift.json 
-	# - as "POSTGRESQL_ADMIN_PASSWORD"; I'm going to change it in openshift.json - but I'm leaving this
-	# next line here, just in case: (L.A. -- Sept. 2018)
-	$CONFIG_DEFAULTS{'POSTGRES_ADMIN_PASSWORD'} = $ENV{'POSTGRESQL_ADMIN_PASSWORD'};
-	$CONFIG_DEFAULTS{'POSTGRES_PASSWORD'} = $ENV{"POSTGRES_PASSWORD"};
-    }
-
-    if ( $CONFIG_DEFAULTS{'POSTGRES_SERVER'} eq 'localhost' || $CONFIG_DEFAULTS{'POSTGRES_SERVER'} eq '127.0.0.1' ) 
-    {
-	$pg_local_connection = 1;
-    } 
-#    elsif ($postgresonly) 
-#    {
-#	print "In the --pg_only mode the script can only be run LOCALLY,\n";
-#	print "i.e., on the server where PostgresQL is running, with the\n";
-#	print "Postgres server address as localhost - \"127.0.0.1\".\n";
-#	exit 1;
-#    }
-
-#If it is executing in a container, proceed easy with this all-in-one block
-
-
-
-
-# 3b. LOCATE THE psql EXECUTABLE:
-
-    if ( $pod_name eq "start-glassfish"){
-        $psql_exec_path = "/usr/bin"    
-    } 
-    else 
-    {
-	my $sys_path = $ENV{'PATH'};
-	my @sys_path_dirs = split( ":", $sys_path );
-
-	for my $sys_path_dir (@sys_path_dirs) {
-	    
-	    if ( -x $sys_path_dir . "/psql" ) {
-		$psql_exec_path = $sys_path_dir;
-
-		last;
-	    }
-	}
-    }
-
-    my $psql_major_version = 0;
-    my $psql_minor_version = 0;
-
-# 3c. IF PSQL WAS FOUND IN THE PATH, CHECK ITS VERSION:
-
-    unless ( $psql_exec_path eq "" ) {
-	open( PSQLOUT, $psql_exec_path . "/psql --version|" );
-
-	my $psql_version_line = <PSQLOUT>;
-	chop $psql_version_line;
-	close PSQLOUT;
-    
-	my ( $postgresName, $postgresNameLong, $postgresVersion ) = split( " ", $psql_version_line );
-
-	unless ( $postgresName eq "psql" && $postgresVersion =~ /^[0-9][0-9\.]*$/ ) {
-	    print STDERR "\nWARNING: Unexpected output from psql command!\n";
-	}
-	else 
-	{
-	    my (@psql_version_tokens) = split( '\.', $postgresVersion );
-
-	    print "\n\nFound Postgres psql command, version $postgresVersion.\n\n";
-
-	    $psql_major_version = $psql_version_tokens[0];
-	    $psql_minor_version = $psql_version_tokens[1];
-
-	    $pg_major_version = $psql_major_version;
-	    $pg_minor_version = $psql_minor_version;
-
-	}
-    }
-
-# a frequent problem with MacOSX is that the copy of psql found in the PATH
-# belongs to the older version of PostgresQL supplied with the OS, which happens
-# to be incompatible with the newer builds from the Postgres project; which are
-# recommended to be used with Dataverse. So if this is a MacOSX box, we'll
-# check what other versions of PG are available, and select the highest version
-# we can find:
-
-    if ( $WORKING_OS eq "MacOSX" ) {
-	my $macos_pg_major_version = 0;
-	my $macos_pg_minor_version = 0;
-	
-	for $macos_pg_minor_version ( "9", "8", "7", "6", "5", "4", "3", "2", "1", "0" ) {
-	    if ( -x "/Library/PostgreSQL/9." . $macos_pg_minor_version . "/bin/psql" ) {
-		$macos_pg_major_version = 9;
-		if (   ( $macos_pg_major_version > $psql_major_version )
-		       || ( $macos_pg_minor_version >= $psql_minor_version ) )
-		{
-		    $psql_exec_path        = "/Library/PostgreSQL/9." . $macos_pg_minor_version . "/bin";
-		    $pg_major_version = $macos_pg_major_version;
-		    $pg_minor_version = $macos_pg_minor_version;
-		}
-		last;
-	    }
-	}
-    }
-
-    my $psql_admin_exec = "";
-
-    if ( $psql_exec_path eq "" ) 
-    { 
-	if ( $pg_local_connection || $noninteractive) 
-	{
-	    print STDERR "\nERROR: I haven't been able to find the psql command in your PATH!\n";
-	    print STDERR "Please make sure PostgresQL is properly installed; if necessary, add\n";
-	    print STDERR "the location of psql to the PATH, then try again.\n\n";
-	    
-	    exit 1;
-	}
-	else 
-	{
-	    print "WARNING: I haven't been able to find the psql command in your PATH!\n";
-	    print "But since we are configuring a Dataverse instance to use a remote Postgres server,\n";
-	    print "we can still set up the database by running a setup script on that remote server\n";
-	    print "(see below for instructions).\n";
-	    
-	}
-    } else {
-
-	print "(Using psql version " . $pg_major_version . "." . $pg_minor_version . ": " . $psql_exec_path . "/psql)\n";
-
-
-	$psql_admin_exec = "PGPASSWORD=" . $CONFIG_DEFAULTS{'POSTGRES_ADMIN_PASSWORD'} . "; export PGPASSWORD; " . $psql_exec_path; 
-	$psql_exec_path = "PGPASSWORD=" . $CONFIG_DEFAULTS{'POSTGRES_PASSWORD'} . "; export PGPASSWORD; " . $psql_exec_path; 
-
-	print "Checking if we can talk to Postgres as the admin user...\n";
-    }
-    
-# 3d. CHECK IF WE CAN TALK TO POSTGRES AS THE ADMIN:
-
-    if ($psql_exec_path eq "" || system( $psql_admin_exec . "/psql -h " . $CONFIG_DEFAULTS{'POSTGRES_SERVER'} . " -p " . $CONFIG_DEFAULTS{'POSTGRES_PORT'} . " -U " . $POSTGRES_ADMIN_USER . " -d postgres -c 'SELECT * FROM pg_roles' > /dev/null 2>&1" ) ) 
-    {
-	# No, we can't. :(
-	if ($pg_local_connection || $noninteractive) 
-	{
-	    # If Postgres is running locally, this is a fatal condition. 
-	    # We'll give them some (potentially) helpful pointers and exit.
-
-	    print "(Tried executing: " . $psql_admin_exec . "/psql -h " . $CONFIG_DEFAULTS{'POSTGRES_SERVER'} . " -p " . $CONFIG_DEFAULTS{'POSTGRES_PORT'} . " -U " . $POSTGRES_ADMIN_USER . " -d postgres -c 'SELECT * FROM pg_roles' > /dev/null 2>&1) \n";
-	    print "Nope, I haven't been able to connect to the local instance of PostgresQL as the admin user.\n";
-	    print "\nIs postgresql running? \n";
-	    print "   On a RedHat-like system, you can check the status of the daemon with\n\n";
-	    print "      service postgresql start\n\n";
-	    print "   On MacOSX, use Applications -> PostgresQL -> Start Server.\n";
-	    print "   (or, if there's no \"Start Server\" item in your PostgresQL folder, \n";
-	    print "   simply restart your MacOSX system!)\n";
-	    print "\nAlso, please make sure that the daemon is listening to network connections!\n";
-	    print "   - at least on the localhost interface. (See \"Installing Postgres\" section\n";
-	    print "   of the installation manual).\n";
-	    print "\nFinally, did you supply the correct admin password?\n";
-	    print "   Don't know the admin password for your Postgres installation?\n";
-	    print "   - then simply set the access level to \"trust\" temporarily (for localhost only!)\n";
-	    print "   in your pg_hba.conf file. Again, please consult the \n";
-	    print "   installation manual).\n";
-	    exit 1;
-	}
-	else 
-	{
-	    # If we are configuring the Dataverse instance to use a Postgres server 
-	    # running on a remote host, it is possible to configure the database
-	    # without opening remote access for the admin user. They will simply 
-	    # have to run this script in the "postgres-only" mode on that server, locally, 
-	    # then resume the installation here: 
-	    print "(Tried executing: " . $psql_admin_exec . "/psql -h " . $CONFIG_DEFAULTS{'POSTGRES_SERVER'} . " -p " . $CONFIG_DEFAULTS{'POSTGRES_PORT'} . " -U " . $POSTGRES_ADMIN_USER . " -d postgres -c 'SELECT * FROM pg_roles' > /dev/null 2>&1)\n\n";
-	    print "Haven't been able to connect to the remote Postgres server as the admin user.\n";
-	    print "(Or you simply don't have psql installed on this server)\n";
-	    print "It IS possible to configure a database for your Dataverse on a remote server,\n";
-	    print "without having admin access to that remote Postgres installation.\n\n";
-	    print "In order to do that, please copy the installer (the entire package) to the server\n";
-	    print "where PostgresQL is running and run the installer with the \"--pg_only\" option:\n\n";
-	    print "   ./install --pg_only\n\n";
-
-	    print "Press any key to continue the installation process once that has been\n";
-	    print "done. Or press ctrl-C to exit the installer.\n\n";
-	    
-	    system "stty cbreak </dev/tty >/dev/tty 2>&1";
-	    my $key = getc(STDIN);
-	    system "stty -cbreak </dev/tty >/dev/tty 2>&1";
-	    print "\n";
-	}
-    }
-    else 
-    {
-	print "Yes, we can!\n";
-
-	# ok, we can proceed with configuring things...
-
-	print "\nConfiguring Postgres Database:\n";
-
-	# 4c. CHECK IF THIS DB ALREADY EXISTS:
-    
-	my $psql_command_dbcheck =
-	    $psql_admin_exec . "/psql -h " . $CONFIG_DEFAULTS{'POSTGRES_SERVER'} . " -p " . $CONFIG_DEFAULTS{'POSTGRES_PORT'} . " -U " . $POSTGRES_ADMIN_USER . " -c '' -d " . $CONFIG_DEFAULTS{'POSTGRES_DATABASE'} . ">/dev/null 2>&1";
-
-	if ( ( my $exitcode = system($psql_command_dbcheck) ) == 0 ) 
-	{    
-	    if ($force) 
-	    {
-		print "WARNING! Database "
-		    . $CONFIG_DEFAULTS{'POSTGRES_DATABASE'}
-		. " already exists but --force given... continuing.\n";
-	    } 
-	    else 
-	    {
-		print "WARNING! Database " . $CONFIG_DEFAULTS{'POSTGRES_DATABASE'} . " already exists!\n";
-
-		if ($noninteractive)
-		{
-		    exit 1;
-		}
-		else 
-		{
-		    print "\nPress any key to continue, or ctrl-C to exit the installer...\n\n";
-        
-		    system "stty cbreak </dev/tty >/dev/tty 2>&1";
-		    my $key = getc(STDIN);
-		    system "stty -cbreak </dev/tty >/dev/tty 2>&1";
-		    print "\n";
-
-		}
-	    }
-	}
-
-	# 3e. CHECK IF THIS USER ALREADY EXISTS:
-    
-	my $psql_command_rolecheck =
-	    $psql_exec_path . "/psql -h " . $CONFIG_DEFAULTS{'POSTGRES_SERVER'} . " -c '' -d postgres " . $CONFIG_DEFAULTS{'POSTGRES_USER'} . " >/dev/null 2>&1";
-	my $exitcode; 
-
-	if ( ( $exitcode = system($psql_command_rolecheck) ) == 0 ) 
-	{
-	    print "User (role) " . $CONFIG_DEFAULTS{'POSTGRES_USER'} . " already exists;\n";
-	    print "Proceeding.";
-	}
-	else 
-	{
-	    # 3f. CREATE DVN DB USER:
-	    
-	    print "\nCreating Postgres user (role) for the DVN:\n";
-    
-	    open TMPCMD, ">/tmp/pgcmd.$$.tmp";
-
-	    # with md5-encrypted password:
-	    my $pg_password_md5 =
-		&create_pg_hash( $CONFIG_DEFAULTS{'POSTGRES_USER'}, $CONFIG_DEFAULTS{'POSTGRES_PASSWORD'} );
-	    my $sql_command =
-		"CREATE ROLE \""
-		. $CONFIG_DEFAULTS{'POSTGRES_USER'}
-	    . "\" PASSWORD 'md5"
-		. $pg_password_md5
-		. "' NOSUPERUSER CREATEDB CREATEROLE INHERIT LOGIN";
-
-	    print TMPCMD $sql_command;
-	    close TMPCMD;
-        
-	    my $psql_commandline = $psql_admin_exec . "/psql -h " . $CONFIG_DEFAULTS{'POSTGRES_SERVER'} . " -p " . $CONFIG_DEFAULTS{'POSTGRES_PORT'} . " -U " . $POSTGRES_ADMIN_USER . " -d postgres -f /tmp/pgcmd.$$.tmp >/dev/null 2>&1";
-
-	    my $out      = qx($psql_commandline 2>&1);
-	    $exitcode = $?;
-	    unless ( $exitcode == 0 ) 
-	    {
-		print STDERR "Could not create the DVN Postgres user role!\n";
-		print STDERR "(SQL: " . $sql_command . ")\n";
-		print STDERR "(psql exit code: " . $exitcode . ")\n";
-		print STDERR "(STDERR and STDOUT was: " . $out . ")\n";
-		exit 1;
-	    }
-
-	    unlink "/tmp/pgcmd.$$.tmp";
-	    print "done.\n";
-	}
-    
-	# 3g. CREATE DVN DB:
-    
-	print "\nCreating Postgres database:\n";
-    
-	my $psql_command =
-	    $psql_admin_exec
-	    . "/createdb -h " . $CONFIG_DEFAULTS{'POSTGRES_SERVER'} . " -p " . $CONFIG_DEFAULTS{'POSTGRES_PORT'} . " -U " . $POSTGRES_ADMIN_USER ." "
-	    . $CONFIG_DEFAULTS{'POSTGRES_DATABASE'} . " --owner="
-	    . $CONFIG_DEFAULTS{'POSTGRES_USER'};
-
-	my $out = qx($psql_command 2>&1);
-        $exitcode = $?;
-	unless ( $exitcode == 0 ) 
-	{
-	    print STDERR "Could not create Postgres database for the Dataverse app!\n";
-	    print STDERR "(command: " . $psql_command . ")\n";
-	    print STDERR "(psql exit code: " . $exitcode . ")\n";
-	    print STDERR "(STDOUT and STDERR: " . $out . ")\n";
-	    if ($force) 
-	    {
-		print STDERR "\ncalled with --force, continuing\n";
-	    }
-	    else 
-	    {
-		print STDERR "\naborting the installation (sorry!)\n\n";
-		exit 1;
-	    }
-	}
-    }
-
-# Whether the user and the database were created locally or remotely, we'll now 
-# verify that we can talk to that database, with the credentials of the database
-# user that we want the Dataverse application to be using: 
-
-    if ( $psql_exec_path ne "" && system( $psql_exec_path . "/psql -h " . $CONFIG_DEFAULTS{'POSTGRES_SERVER'} . " -p " . $CONFIG_DEFAULTS{'POSTGRES_PORT'} . " -U " . $CONFIG_DEFAULTS{'POSTGRES_USER'} . " -d " . $CONFIG_DEFAULTS{'POSTGRES_DATABASE'} . " -c 'SELECT * FROM pg_roles' > /dev/null 2>&1" ) ) 
-    {
-	print STDERR "Oops, haven't been able to connect to the database " . $CONFIG_DEFAULTS{'POSTGRES_DATABASE'} . ",\n";
-	print STDERR "running on " . $CONFIG_DEFAULTS{'POSTGRES_SERVER'} . ", as user " . $CONFIG_DEFAULTS{'POSTGRES_USER'} . ".\n\n";
-	print STDERR "Aborting the installation (sorry!)\n";
-	exit 1; 
-    }
-}
-
-sub read_config_defaults {
-    my $config_file = shift @_;
-
-    unless ( -f $config_file ) 
-    {
-	print STDERR "Can't find the config file " . $config_file . "!\n";
-	exit 1;
-    }
-
-    open CF, $config_file || die "Can't open config file " . $config_file . " for reading.\n"; 
-
-    while (<CF>)
-    {
-	chop; 
-
-	if ( $_ =~/^[A-Z]/ && $_ =~/ *= */ ) 
-	{
-	    my ($name, $value) = split(/ *= */, $_, 2);
-	    $CONFIG_DEFAULTS{$name} = $value;
-	}
-    }
-    close CF;
-}
-
-sub read_interactive_config_values {
-    my $config_file = shift @_;
-
-    unless ( -f $config_file ) 
-    {
-	print STDERR "Can't find the config file " . $config_file . "!\n";
-	exit 1;
-    }
-
-    open CF, $config_file || die "Can't open config file " . $config_file . " for reading.\n"; 
-
-    my $mode = "";
-
-    while (<CF>)
-    {
-	chop; 
-
-	if ( $_ eq "[prompts]" || $_ eq "[comments]" )
-	{
-	    $mode = $_; 
-	}
-
-	if ( $_ =~/^[A-Z]/ && $_ =~/ *= */ ) 
-	{
-	    my ($name, $value) = split(/ *= */, $_, 2);
-	    
-	    if ( $mode eq "[prompts]" ) 
-	    {
-		$CONFIG_PROMPTS{$name} = $value;
-	    }
-	    elsif ( $mode eq "[comments]" )
-	    {
-		$value =~s/\\n/\n/g;
-		$CONFIG_COMMENTS{$name} = $value;
-	    }
-	}
-    }
-    close CF;
-}
diff --git a/scripts/installer/install.py b/scripts/installer/install.py
index 5acb4d760a4..3aedbd8c6ad 100644
--- a/scripts/installer/install.py
+++ b/scripts/installer/install.py
@@ -252,8 +252,8 @@
    # 1d. check java version
    java_version = subprocess.check_output(["java", "-version"], stderr=subprocess.STDOUT).decode()
    print("Found java version "+java_version)
-   if not re.search('(1.8|11)', java_version):
-      sys.exit("Dataverse requires OpenJDK 1.8 or 11. Please make sure it's in your PATH, and try again.")
+   if not re.search('(17)', java_version):
+      sys.exit("Dataverse requires OpenJDK 17. Please make sure it's in your PATH, and try again.")
 
    # 1e. check if the setup scripts - setup-all.sh, are available as well, maybe?
    # @todo (?)
@@ -314,7 +314,7 @@
                   gfDir = config.get('glassfish', 'GLASSFISH_DIRECTORY')
                   while not test_appserver_directory(gfDir):
                      print("\nInvalid Payara directory!")
-                     gfDir = read_user_input("Enter the root directory of your Payara5 installation:\n(Or ctrl-C to exit the installer): ")
+                     gfDir = read_user_input("Enter the root directory of your Payara installation:\n(Or ctrl-C to exit the installer): ")
                   config.set('glassfish', 'GLASSFISH_DIRECTORY', gfDir)
                elif option == "mail_server":
                   mailServer = config.get('system', 'MAIL_SERVER')
@@ -380,12 +380,13 @@
       print("Can't connect to PostgresQL as the admin user.\n")
       sys.exit("Is the server running, have you adjusted pg_hba.conf, etc?")
 
-   # 3b. get the Postgres version (do we need it still?)
+   # 3b. get the Postgres version for new permissions model in versions 15+
    try:
-      pg_full_version = conn.server_version
-      print("PostgresQL version: "+str(pg_full_version))
+      pg_full_version = str(conn.server_version)
+      pg_major_version = pg_full_version[0:2]
+      print("PostgreSQL version: "+pg_major_version)
    except:
-      print("Warning: Couldn't determine PostgresQL version.")
+      print("Warning: Couldn't determine PostgreSQL version.")
    conn.close()
 
    # 3c. create role:
@@ -410,7 +411,9 @@
       else:
          sys.exit("Couldn't create database or database already exists.\n")
 
-   conn_cmd = "GRANT ALL PRIVILEGES on DATABASE "+pgDb+" to "+pgUser+";"
+   # 3e. set permissions:
+
+   conn_cmd = "GRANT CREATE PRIVILEGES on DATABASE "+pgDb+" to "+pgUser+";"
    try:
       cur.execute(conn_cmd)
    except:
@@ -418,6 +421,19 @@
    cur.close()
    conn.close()
 
+   if int(pg_major_version) >= 15:
+      conn_cmd = "GRANT ALL ON SCHEMA public TO "+pgUser+";"
+      print("PostgreSQL 15 or higher detected. Running " + conn_cmd)
+      try:
+         cur.execute(conn_cmd)
+      except:
+         if force:
+            print("WARNING: failed to grant permissions on schema public - continuing, since the --force option was specified")
+         else:
+            sys.exit("Couldn't grant privileges on schema public to "+pgUser)
+      cur.close()
+      conn.close()
+
    print("Database and role created!")
    if pgOnly:
       print("postgres-only setup complete.")
@@ -511,12 +527,12 @@
 try: 
    copy2(jhoveConfigSchemaDist, gfConfigDir)
    # The JHOVE conf file has an absolute PATH of the JHOVE config schema file (uh, yeah...)
-   # and may need to be adjusted, if Payara is installed anywhere other than /usr/local/payara5:
-   if gfDir == "/usr/local/payara5":
+   # and may need to be adjusted, if Payara is installed anywhere other than /usr/local/payara6:
+   if gfDir == "/usr/local/payara6":
       copy2(jhoveConfigDist, gfConfigDir)
    else:
-      # use sed to replace /usr/local/payara5 in the distribution copy with the real gfDir:
-      sedCommand = "sed 's:/usr/local/payara5:"+gfDir+":g' < " + jhoveConfigDist + " > " + gfConfigDir + "/" + jhoveConfig
+      # use sed to replace /usr/local/payara6 in the distribution copy with the real gfDir:
+      sedCommand = "sed 's:/usr/local/payara6:"+gfDir+":g' < " + jhoveConfigDist + " > " + gfConfigDir + "/" + jhoveConfig
       subprocess.call(sedCommand, shell=True)
 
    print("done.")
diff --git a/scripts/installer/installAppServer.py b/scripts/installer/installAppServer.py
index 8b719ac09d1..698f5ba9a58 100644
--- a/scripts/installer/installAppServer.py
+++ b/scripts/installer/installAppServer.py
@@ -3,7 +3,7 @@
 
 def runAsadminScript(config):
    # We are going to run a standalone shell script with a bunch of asadmin                                      
-   # commands to set up all the app. server (payara5) components for the application.                                       
+   # commands to set up all the app. server (payara6) components for the application.
    # All the parameters must be passed to that script as environmental                                          
    # variables:
    os.environ['GLASSFISH_DOMAIN'] = "domain1";
diff --git a/scripts/installer/installUtils.py b/scripts/installer/installUtils.py
index 7cc368de5f8..ff5e6eb708d 100644
--- a/scripts/installer/installUtils.py
+++ b/scripts/installer/installUtils.py
@@ -57,7 +57,7 @@ def test_appserver_directory(directory):
 
         #print("version: major: "+str(major_version)+", minor: "+str(minor_version))
 
-        if major_version != 5 or minor_version < 201:
+        if major_version != 6 or minor_version < 2023:
             return False
         return True
 
diff --git a/scripts/installer/interactive.config b/scripts/installer/interactive.config
index 86ea926fe5d..ef8110c554f 100644
--- a/scripts/installer/interactive.config
+++ b/scripts/installer/interactive.config
@@ -26,7 +26,7 @@ DOI_BASEURL = Datacite URL
 DOI_DATACITERESTAPIURL = Datacite REST API URL
 [comments]
 HOST_DNS_ADDRESS = :(enter numeric IP address, if FQDN is unavailable)
-GLASSFISH_USER = :This user will be running the App. Server (Payara5) service on your system.\n - If this is a dev. environment, this should be your own username; \n - In production, we suggest you create the account "dataverse", or use any other unprivileged user account\n:
+GLASSFISH_USER = :This user will be running the App. Server (Payara) service on your system.\n - If this is a dev. environment, this should be your own username; \n - In production, we suggest you create the account "dataverse", or use any other unprivileged user account\n:
 GLASSFISH_DIRECTORY = 
 GLASSFISH_REQUEST_TIMEOUT = :\n Defaults to 1800 seconds (30 minutes)
 ADMIN_EMAIL = :\n(please enter a valid email address!) 
diff --git a/scripts/tests/ec2-memory-benchmark/ec2-memory-benchmark-remote.sh b/scripts/tests/ec2-memory-benchmark/ec2-memory-benchmark-remote.sh
index 0cfdd20c272..367aa214563 100755
--- a/scripts/tests/ec2-memory-benchmark/ec2-memory-benchmark-remote.sh
+++ b/scripts/tests/ec2-memory-benchmark/ec2-memory-benchmark-remote.sh
@@ -5,7 +5,7 @@ then
     EC2_HTTP_LOCATION="<EC2 INSTANCE HTTP ADDRESS>"
 fi
 
-DATAVERSE_APP_DIR=/usr/local/payara5/glassfish/domains/domain1/applications/dataverse; export DATAVERSE_APP_DIR
+DATAVERSE_APP_DIR=/usr/local/payara6/glassfish/domains/domain1/applications/dataverse; export DATAVERSE_APP_DIR
 
 # restart app server
 
diff --git a/scripts/vagrant/install-dataverse.sh b/scripts/vagrant/install-dataverse.sh
deleted file mode 100644
index c9873f7d3ec..00000000000
--- a/scripts/vagrant/install-dataverse.sh
+++ /dev/null
@@ -1,31 +0,0 @@
-#!/usr/bin/env bash
-
-if [ ! -z "$1" ]; then
-  MAILSERVER=$1
-  MAILSERVER_ARG="--mailserver $MAILSERVER"
-fi
-WAR=/dataverse/target/dataverse*.war
-if [ ! -f $WAR ]; then
-  echo "no war file found... building"
-  #echo "Installing nss on CentOS 6 to avoid java.security.KeyException while building war file: https://github.com/IQSS/dataverse/issues/2744"
-  #yum install -y nss
-  su $SUDO_USER -s /bin/sh -c "cd /dataverse && source /etc/profile.d/maven.sh && mvn -q package"
-fi
-cd /dataverse/scripts/installer
-
-# move any pre-existing `default.config` file out of the way to avoid overwriting
-pid=$$
-if [ -e default.config ]; then
-	cp default.config tmp-${pid}-default.config
-fi
-
-# Switch to newer Python-based installer
-python3 ./install.py --noninteractive --config_file="default.config"
-
-if [ -e tmp-${pid}-default.config ]; then # if we moved it out, move it back
-	mv -f tmp-${pid}-default.config default.config
-fi
-
-echo "If "vagrant up" was successful (check output above) Dataverse is running on port 8080 of the Linux machine running within Vagrant, but this port has been forwarded to port 8088 of the computer you ran "vagrant up" on. For this reason you should go to http://localhost:8088 to see the Dataverse app running."
-
-echo "Please also note that the installation script has now started Payara, but has not set up an autostart mechanism for it.\nTherefore, the next time this VM is started, Payara must be started manually.\nSee https://guides.dataverse.org/en/latest/installation/prerequisites.html#launching-payara-on-system-boot for details."
diff --git a/scripts/vagrant/rpmbuild.sh b/scripts/vagrant/rpmbuild.sh
deleted file mode 100755
index f10830afb5b..00000000000
--- a/scripts/vagrant/rpmbuild.sh
+++ /dev/null
@@ -1,3 +0,0 @@
-#!/bin/sh
-rpm -Uvh http://dl.fedoraproject.org/pub/epel/7/x86_64/e/epel-release-7-7.noarch.rpm
-yum install -y rpm-build httpd-devel libapreq2-devel R-devel
diff --git a/scripts/vagrant/setup-counter-processor.sh b/scripts/vagrant/setup-counter-processor.sh
deleted file mode 100755
index a418e8d6251..00000000000
--- a/scripts/vagrant/setup-counter-processor.sh
+++ /dev/null
@@ -1,33 +0,0 @@
-#!/bin/bash
-echo "Setting up counter-processor"
-echo "Installing dependencies"
-yum -y install unzip vim-enhanced
-yum install -y https://dl.fedoraproject.org/pub/epel/epel-release-latest-7.noarch.rpm
-# EPEL provides Python 3.6.6, new enough (3.6.4 in .python-version)
-yum -y install python36 jq
-# "ensurepip" tip from https://stackoverflow.com/questions/50408941/recommended-way-to-install-pip3-on-centos7/52518512#52518512
-python3.6 -m ensurepip
-# FIXME: actually use this dedicated "counter" user.
-COUNTER_USER=counter
-echo "Ensuring Unix user '$COUNTER_USER' exists"
-useradd $COUNTER_USER || :
-COMMIT='7974dad259465ba196ef639f48dea007cae8f9ac'
-UNZIPPED_DIR="counter-processor-$COMMIT"
-if [ ! -e $UNZIPPED_DIR ]; then
-  ZIP_FILE="${COMMIT}.zip"
-  echo "Downloading and unzipping $ZIP_FILE"
-  wget https://github.com/CDLUC3/counter-processor/archive/$ZIP_FILE
-  unzip $ZIP_FILE
-fi
-cd $UNZIPPED_DIR
-echo "Installation of the GeoLite2 country database for counter-processor can no longer be automated. See the Installation Guide for the manual installation process."
-pip3 install -r requirements.txt
-# For now, parsing sample_logs/counter_2018-05-08.log
-for i in `echo {00..31}`; do
-  # avoid errors like: No such file or directory: 'sample_logs/counter_2018-05-01.log'
-  touch sample_logs/counter_2018-05-$i.log
-done
-#LOG_GLOB="sample_logs/counter_2018-05-*.log"
-#START_DATE="2018-05-08"
-#END_DATE="2018-05-09"
-CONFIG_FILE=/dataverse/scripts/vagrant/counter-processor-config.yaml python3.6 main.py
diff --git a/scripts/vagrant/setup-solr.sh b/scripts/vagrant/setup-solr.sh
deleted file mode 100755
index 70d3fc632a7..00000000000
--- a/scripts/vagrant/setup-solr.sh
+++ /dev/null
@@ -1,18 +0,0 @@
-#!/bin/bash
-echo "Setting up Solr"
-dnf install -qy lsof
-SOLR_USER=solr
-SOLR_HOME=/usr/local/solr
-mkdir $SOLR_HOME
-chown $SOLR_USER:$SOLR_USER $SOLR_HOME
-su $SOLR_USER -s /bin/sh -c "cp /dataverse/downloads/solr-8.11.1.tgz $SOLR_HOME"
-su $SOLR_USER -s /bin/sh -c "cd $SOLR_HOME && tar xfz solr-8.11.1.tgz"
-su $SOLR_USER -s /bin/sh -c "cd $SOLR_HOME/solr-8.11.1/server/solr && cp -r configsets/_default . && mv _default collection1"
-su $SOLR_USER -s /bin/sh -c "cp /dataverse/conf/solr/8.11.1/schema*.xml $SOLR_HOME/solr-8.11.1/server/solr/collection1/conf/"
-su $SOLR_USER -s /bin/sh -c "cp /dataverse/conf/solr/8.11.1/solrconfig.xml $SOLR_HOME/solr-8.11.1/server/solr/collection1/conf/solrconfig.xml"
-su $SOLR_USER -s /bin/sh -c "cd $SOLR_HOME/solr-8.11.1 && bin/solr start && bin/solr create_core -c collection1 -d server/solr/collection1/conf/"
-cp /dataverse/doc/sphinx-guides/source/_static/installation/files/etc/init.d/solr /etc/init.d/solr
-chmod 755 /etc/init.d/solr
-/etc/init.d/solr stop
-/etc/init.d/solr start
-chkconfig solr on
diff --git a/scripts/vagrant/setup.sh b/scripts/vagrant/setup.sh
deleted file mode 100644
index 0af4afb22af..00000000000
--- a/scripts/vagrant/setup.sh
+++ /dev/null
@@ -1,96 +0,0 @@
-#!/bin/bash
-echo "Installing dependencies for Dataverse"
-
-# wget seems to be missing in box 'bento/centos-8.2'
-dnf install -qy wget
-
-# python3 and psycopg2 for the Dataverse installer
-dnf install -qy python3 python3-psycopg2
-
-# JQ
-echo "Installing jq for the setup scripts"
-dnf install -qy epel-release
-dnf install -qy jq
-
-echo "Adding Shibboleth yum repo"
-cp /dataverse/conf/vagrant/etc/yum.repos.d/shibboleth.repo /etc/yum.repos.d
-# Uncomment this (and other shib stuff below) if you want
-# to use Vagrant (and maybe PageKite) to test Shibboleth.
-#yum install -y shibboleth shibboleth-embedded-ds
-
-# java configuration et alia
-dnf install -qy java-11-openjdk-devel httpd mod_ssl unzip
-alternatives --set java /usr/lib/jvm/jre-11-openjdk/bin/java
-java -version
-
-# maven included in centos8 requires 1.8.0 - download binary instead
-wget -q https://archive.apache.org/dist/maven/maven-3/3.8.2/binaries/apache-maven-3.8.2-bin.tar.gz
-tar xfz apache-maven-3.8.2-bin.tar.gz
-mkdir /opt/maven
-mv apache-maven-3.8.2/* /opt/maven/
-echo "export JAVA_HOME=/usr/lib/jvm/jre-openjdk" > /etc/profile.d/maven.sh
-echo "export M2_HOME=/opt/maven" >> /etc/profile.d/maven.sh
-echo "export MAVEN_HOME=/opt/maven" >> /etc/profile.d/maven.sh
-echo "export PATH=/opt/maven/bin:${PATH}" >> /etc/profile.d/maven.sh
-chmod 0755 /etc/profile.d/maven.sh
-
-# disable centos8 postgresql module and install postgresql13-server
-dnf -qy module disable postgresql
-dnf install -qy https://download.postgresql.org/pub/repos/yum/reporpms/EL-8-x86_64/pgdg-redhat-repo-latest.noarch.rpm
-dnf install -qy postgresql13-server
-/usr/pgsql-13/bin/postgresql-13-setup initdb
-/usr/bin/systemctl stop postgresql-13
-cp /dataverse/conf/vagrant/var/lib/pgsql/data/pg_hba.conf /var/lib/pgsql/13/data/pg_hba.conf
-/usr/bin/systemctl start postgresql-13
-/usr/bin/systemctl enable postgresql-13
-
-PAYARA_USER=dataverse
-echo "Ensuring Unix user '$PAYARA_USER' exists"
-useradd $PAYARA_USER || :
-SOLR_USER=solr
-echo "Ensuring Unix user '$SOLR_USER' exists"
-useradd $SOLR_USER || :
-DOWNLOAD_DIR='/dataverse/downloads'
-PAYARA_ZIP="$DOWNLOAD_DIR/payara-5.2022.3.zip"
-SOLR_TGZ="$DOWNLOAD_DIR/solr-8.11.1.tgz"
-if [ ! -f $PAYARA_ZIP ] || [ ! -f $SOLR_TGZ ]; then
-    echo "Couldn't find $PAYARA_ZIP or $SOLR_TGZ! Running download script...."
-    cd $DOWNLOAD_DIR && ./download.sh && cd
-    echo "Done running download script."
-fi
-PAYARA_USER_HOME=~dataverse
-PAYARA_ROOT=/usr/local/payara5
-if [ ! -d $PAYARA_ROOT ]; then
-  echo "Copying $PAYARA_ZIP to $PAYARA_USER_HOME and unzipping"
-  su $PAYARA_USER -s /bin/sh -c "cp $PAYARA_ZIP $PAYARA_USER_HOME"
-  su $PAYARA_USER -s /bin/sh -c "cd $PAYARA_USER_HOME && unzip -q $PAYARA_ZIP"
-  # default.config defaults to /usr/local/payara5 so let's go with that
-  rsync -a $PAYARA_USER_HOME/payara5/ $PAYARA_ROOT/
-else
-  echo "$PAYARA_ROOT already exists"
-fi
-
-#service shibd start
-/usr/bin/systemctl stop httpd
-cp /dataverse/conf/httpd/conf.d/dataverse.conf /etc/httpd/conf.d/dataverse.conf
-mkdir -p /var/www/dataverse/error-documents
-cp /dataverse/conf/vagrant/var/www/dataverse/error-documents/503.html /var/www/dataverse/error-documents
-/usr/bin/systemctl start httpd
-#curl -k --sslv3 https://pdurbin.pagekite.me/Shibboleth.sso/Metadata > /tmp/pdurbin.pagekite.me
-#cp -a /etc/shibboleth/shibboleth2.xml /etc/shibboleth/shibboleth2.xml.orig
-#cp -a /etc/shibboleth/attribute-map.xml /etc/shibboleth/attribute-map.xml.orig
-# need more attributes, such as sn, givenName, mail
-#cp /dataverse/conf/vagrant/etc/shibboleth/attribute-map.xml /etc/shibboleth/attribute-map.xml
-# FIXME: automate this?
-#curl 'https://www.testshib.org/cgi-bin/sp2config.cgi?dist=Others&hostname=pdurbin.pagekite.me' > /etc/shibboleth/shibboleth2.xml
-#cp /dataverse/conf/vagrant/etc/shibboleth/shibboleth2.xml /etc/shibboleth/shibboleth2.xml
-#service shibd restart
-#curl -k --sslv3 https://pdurbin.pagekite.me/Shibboleth.sso/Metadata > /downloads/pdurbin.pagekite.me
-#service httpd restart
-
-echo "#########################################################################################"
-echo "# This is a Vagrant test box, so we're disabling firewalld. 			      #
-echo "# Re-enable it with $ sudo systemctl enable firewalld && sudo systemctl start firewalld #"
-echo "#########################################################################################"
-systemctl disable firewalld
-systemctl stop firewalld
diff --git a/src/main/java/edu/harvard/iq/dataverse/AbstractGlobalIdServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/AbstractGlobalIdServiceBean.java
index 2a3f2d50364..f1bfc3e290b 100644
--- a/src/main/java/edu/harvard/iq/dataverse/AbstractGlobalIdServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/AbstractGlobalIdServiceBean.java
@@ -3,8 +3,8 @@
 import edu.harvard.iq.dataverse.settings.SettingsServiceBean;
 import edu.harvard.iq.dataverse.util.SystemConfig;
 import java.io.InputStream;
-import javax.ejb.EJB;
-import javax.inject.Inject;
+import jakarta.ejb.EJB;
+import jakarta.inject.Inject;
 import java.util.*;
 import java.util.logging.Level;
 import java.util.logging.Logger;
@@ -222,6 +222,11 @@ public GlobalId parsePersistentId(String fullIdentifierString) {
         if(!isConfigured()) {
             return null;
         }
+        // Occasionally, the protocol separator character ':' comes in still
+        // URL-encoded as %3A (usually as a result of the URL having been 
+        // encoded twice):
+        fullIdentifierString = fullIdentifierString.replace("%3A", ":");
+        
         int index1 = fullIdentifierString.indexOf(':');
         if (index1 > 0) { // ':' found with one or more characters before it
             String protocol = fullIdentifierString.substring(0, index1);
diff --git a/src/main/java/edu/harvard/iq/dataverse/AlternativePersistentIdentifier.java b/src/main/java/edu/harvard/iq/dataverse/AlternativePersistentIdentifier.java
index 6fc7262925a..db3c6029a78 100644
--- a/src/main/java/edu/harvard/iq/dataverse/AlternativePersistentIdentifier.java
+++ b/src/main/java/edu/harvard/iq/dataverse/AlternativePersistentIdentifier.java
@@ -3,14 +3,14 @@
 
 import java.io.Serializable;
 import java.util.Date;
-import javax.persistence.Entity;
-import javax.persistence.GeneratedValue;
-import javax.persistence.GenerationType;
-import javax.persistence.Id;
-import javax.persistence.JoinColumn;
-import javax.persistence.ManyToOne;
-import javax.persistence.Temporal;
-import javax.persistence.TemporalType;
+import jakarta.persistence.Entity;
+import jakarta.persistence.GeneratedValue;
+import jakarta.persistence.GenerationType;
+import jakarta.persistence.Id;
+import jakarta.persistence.JoinColumn;
+import jakarta.persistence.ManyToOne;
+import jakarta.persistence.Temporal;
+import jakarta.persistence.TemporalType;
 
 /**
  *
diff --git a/src/main/java/edu/harvard/iq/dataverse/ApiTokenPage.java b/src/main/java/edu/harvard/iq/dataverse/ApiTokenPage.java
index 4838847e400..16ff4d266d8 100644
--- a/src/main/java/edu/harvard/iq/dataverse/ApiTokenPage.java
+++ b/src/main/java/edu/harvard/iq/dataverse/ApiTokenPage.java
@@ -5,14 +5,14 @@
 import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser;
 import edu.harvard.iq.dataverse.util.BundleUtil;
 import edu.harvard.iq.dataverse.api.Util;
-import java.sql.Timestamp;
+
 import java.util.ArrayList;
 import java.util.List;
 import java.util.logging.Logger;
-import javax.ejb.EJB;
-import javax.faces.view.ViewScoped;
-import javax.inject.Inject;
-import javax.inject.Named;
+import jakarta.ejb.EJB;
+import jakarta.faces.view.ViewScoped;
+import jakarta.inject.Inject;
+import jakarta.inject.Named;
 
 /**
  * @todo Rename this to ApiTokenFragment? The separate page is being taken out
diff --git a/src/main/java/edu/harvard/iq/dataverse/AuxiliaryFile.java b/src/main/java/edu/harvard/iq/dataverse/AuxiliaryFile.java
index 344032ef5e3..d03ebbc6f7b 100644
--- a/src/main/java/edu/harvard/iq/dataverse/AuxiliaryFile.java
+++ b/src/main/java/edu/harvard/iq/dataverse/AuxiliaryFile.java
@@ -4,16 +4,16 @@
 import edu.harvard.iq.dataverse.util.BundleUtil;
 import java.io.Serializable;
 import java.util.MissingResourceException;
-import javax.persistence.Entity;
-import javax.persistence.GeneratedValue;
-import javax.persistence.GenerationType;
-import javax.persistence.Id;
-import javax.persistence.JoinColumn;
-import javax.persistence.ManyToOne;
-import javax.persistence.NamedNativeQueries;
-import javax.persistence.NamedNativeQuery;
-import javax.persistence.NamedQueries;
-import javax.persistence.NamedQuery;
+import jakarta.persistence.Entity;
+import jakarta.persistence.GeneratedValue;
+import jakarta.persistence.GenerationType;
+import jakarta.persistence.Id;
+import jakarta.persistence.JoinColumn;
+import jakarta.persistence.ManyToOne;
+import jakarta.persistence.NamedNativeQueries;
+import jakarta.persistence.NamedNativeQuery;
+import jakarta.persistence.NamedQueries;
+import jakarta.persistence.NamedQuery;
 
 /**
  *
diff --git a/src/main/java/edu/harvard/iq/dataverse/AuxiliaryFileServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/AuxiliaryFileServiceBean.java
index 05f3e209632..8c96f98ce39 100644
--- a/src/main/java/edu/harvard/iq/dataverse/AuxiliaryFileServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/AuxiliaryFileServiceBean.java
@@ -14,19 +14,19 @@
 import java.util.ArrayList;
 import java.util.List;
 import java.util.logging.Logger;
-import javax.ejb.EJB;
-import javax.ejb.Stateless;
-import javax.inject.Named;
-import javax.persistence.EntityManager;
-import javax.persistence.NoResultException;
-import javax.persistence.PersistenceContext;
-import javax.persistence.Query;
-import javax.persistence.TypedQuery;
-import javax.ws.rs.ClientErrorException;
-import javax.ws.rs.InternalServerErrorException;
-import javax.ws.rs.ServerErrorException;
-import javax.ws.rs.core.MediaType;
-import javax.ws.rs.core.Response;
+import jakarta.ejb.EJB;
+import jakarta.ejb.Stateless;
+import jakarta.inject.Named;
+import jakarta.persistence.EntityManager;
+import jakarta.persistence.NoResultException;
+import jakarta.persistence.PersistenceContext;
+import jakarta.persistence.Query;
+import jakarta.persistence.TypedQuery;
+import jakarta.ws.rs.ClientErrorException;
+import jakarta.ws.rs.InternalServerErrorException;
+import jakarta.ws.rs.ServerErrorException;
+import jakarta.ws.rs.core.MediaType;
+import jakarta.ws.rs.core.Response;
 
 import org.apache.tika.Tika;
 
diff --git a/src/main/java/edu/harvard/iq/dataverse/BannerMessage.java b/src/main/java/edu/harvard/iq/dataverse/BannerMessage.java
index 4f465168580..214e26965fa 100644
--- a/src/main/java/edu/harvard/iq/dataverse/BannerMessage.java
+++ b/src/main/java/edu/harvard/iq/dataverse/BannerMessage.java
@@ -4,13 +4,13 @@
 import edu.harvard.iq.dataverse.util.BundleUtil;
 import java.io.Serializable;
 import java.util.Collection;
-import javax.persistence.CascadeType;
-import javax.persistence.Column;
-import javax.persistence.Entity;
-import javax.persistence.GeneratedValue;
-import javax.persistence.GenerationType;
-import javax.persistence.Id;
-import javax.persistence.OneToMany;
+import jakarta.persistence.CascadeType;
+import jakarta.persistence.Column;
+import jakarta.persistence.Entity;
+import jakarta.persistence.GeneratedValue;
+import jakarta.persistence.GenerationType;
+import jakarta.persistence.Id;
+import jakarta.persistence.OneToMany;
 
 
 /**
diff --git a/src/main/java/edu/harvard/iq/dataverse/BannerMessageServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/BannerMessageServiceBean.java
index 91b4128c545..0e757998d58 100644
--- a/src/main/java/edu/harvard/iq/dataverse/BannerMessageServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/BannerMessageServiceBean.java
@@ -10,10 +10,10 @@
 import java.util.Date;
 import java.util.List;
 import java.util.logging.Logger;
-import javax.ejb.Stateless;
-import javax.inject.Named;
-import javax.persistence.EntityManager;
-import javax.persistence.PersistenceContext;
+import jakarta.ejb.Stateless;
+import jakarta.inject.Named;
+import jakarta.persistence.EntityManager;
+import jakarta.persistence.PersistenceContext;
 
 /**
  *
diff --git a/src/main/java/edu/harvard/iq/dataverse/BannerMessageText.java b/src/main/java/edu/harvard/iq/dataverse/BannerMessageText.java
index dbae9a6dc27..ea2dd1b41fc 100644
--- a/src/main/java/edu/harvard/iq/dataverse/BannerMessageText.java
+++ b/src/main/java/edu/harvard/iq/dataverse/BannerMessageText.java
@@ -6,13 +6,13 @@
 package edu.harvard.iq.dataverse;
 
 import java.io.Serializable;
-import javax.persistence.Column;
-import javax.persistence.Entity;
-import javax.persistence.GeneratedValue;
-import javax.persistence.GenerationType;
-import javax.persistence.Id;
-import javax.persistence.JoinColumn;
-import javax.persistence.ManyToOne;
+import jakarta.persistence.Column;
+import jakarta.persistence.Entity;
+import jakarta.persistence.GeneratedValue;
+import jakarta.persistence.GenerationType;
+import jakarta.persistence.Id;
+import jakarta.persistence.JoinColumn;
+import jakarta.persistence.ManyToOne;
 
 /**
  *
diff --git a/src/main/java/edu/harvard/iq/dataverse/CitationServlet.java b/src/main/java/edu/harvard/iq/dataverse/CitationServlet.java
index f6b4e3dc99a..68c8d49ad7e 100644
--- a/src/main/java/edu/harvard/iq/dataverse/CitationServlet.java
+++ b/src/main/java/edu/harvard/iq/dataverse/CitationServlet.java
@@ -6,14 +6,14 @@
 package edu.harvard.iq.dataverse;
 
 import edu.harvard.iq.dataverse.pidproviders.PidUtil;
-import edu.harvard.iq.dataverse.util.StringUtil;
+
 import java.io.IOException;
-import java.io.PrintWriter;
-import javax.ejb.EJB;
-import javax.servlet.ServletException;
-import javax.servlet.http.HttpServlet;
-import javax.servlet.http.HttpServletRequest;
-import javax.servlet.http.HttpServletResponse;
+
+import jakarta.ejb.EJB;
+import jakarta.servlet.ServletException;
+import jakarta.servlet.http.HttpServlet;
+import jakarta.servlet.http.HttpServletRequest;
+import jakarta.servlet.http.HttpServletResponse;
 
 /**
  *
diff --git a/src/main/java/edu/harvard/iq/dataverse/ConfigureFragmentBean.java b/src/main/java/edu/harvard/iq/dataverse/ConfigureFragmentBean.java
index d51a73fd2dc..bf509c33995 100644
--- a/src/main/java/edu/harvard/iq/dataverse/ConfigureFragmentBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/ConfigureFragmentBean.java
@@ -16,10 +16,10 @@
 
 import java.sql.Timestamp;
 import java.util.logging.Logger;
-import javax.ejb.EJB;
-import javax.faces.view.ViewScoped;
-import javax.inject.Inject;
-import javax.inject.Named;
+import jakarta.ejb.EJB;
+import jakarta.faces.view.ViewScoped;
+import jakarta.inject.Inject;
+import jakarta.inject.Named;
 import java.util.Date;
 
 
diff --git a/src/main/java/edu/harvard/iq/dataverse/ControlledVocabAlternate.java b/src/main/java/edu/harvard/iq/dataverse/ControlledVocabAlternate.java
index 5d5d9597746..9542cfe3f71 100644
--- a/src/main/java/edu/harvard/iq/dataverse/ControlledVocabAlternate.java
+++ b/src/main/java/edu/harvard/iq/dataverse/ControlledVocabAlternate.java
@@ -7,15 +7,15 @@
 
 import java.io.Serializable;
 import java.util.Objects;
-import javax.persistence.Column;
-import javax.persistence.Entity;
-import javax.persistence.GeneratedValue;
-import javax.persistence.GenerationType;
-import javax.persistence.Id;
-import javax.persistence.Index;
-import javax.persistence.JoinColumn;
-import javax.persistence.ManyToOne;
-import javax.persistence.Table;
+import jakarta.persistence.Column;
+import jakarta.persistence.Entity;
+import jakarta.persistence.GeneratedValue;
+import jakarta.persistence.GenerationType;
+import jakarta.persistence.Id;
+import jakarta.persistence.Index;
+import jakarta.persistence.JoinColumn;
+import jakarta.persistence.ManyToOne;
+import jakarta.persistence.Table;
 
 /**
  *
diff --git a/src/main/java/edu/harvard/iq/dataverse/ControlledVocabularyValue.java b/src/main/java/edu/harvard/iq/dataverse/ControlledVocabularyValue.java
index 181d939f4a1..5dcce98a90f 100644
--- a/src/main/java/edu/harvard/iq/dataverse/ControlledVocabularyValue.java
+++ b/src/main/java/edu/harvard/iq/dataverse/ControlledVocabularyValue.java
@@ -17,16 +17,16 @@
 import java.util.Objects;
 import java.util.logging.Logger;
 import java.util.MissingResourceException;
-import javax.persistence.CascadeType;
-import javax.persistence.Column;
-import javax.persistence.Entity;
-import javax.persistence.GeneratedValue;
-import javax.persistence.GenerationType;
-import javax.persistence.Id;
-import javax.persistence.Index;
-import javax.persistence.ManyToOne;
-import javax.persistence.OneToMany;
-import javax.persistence.Table;
+import jakarta.persistence.CascadeType;
+import jakarta.persistence.Column;
+import jakarta.persistence.Entity;
+import jakarta.persistence.GeneratedValue;
+import jakarta.persistence.GenerationType;
+import jakarta.persistence.Id;
+import jakarta.persistence.Index;
+import jakarta.persistence.ManyToOne;
+import jakarta.persistence.OneToMany;
+import jakarta.persistence.Table;
 
 /**
  *
diff --git a/src/main/java/edu/harvard/iq/dataverse/ControlledVocabularyValueConverter.java b/src/main/java/edu/harvard/iq/dataverse/ControlledVocabularyValueConverter.java
index 1d530e136ba..eadc13721b3 100644
--- a/src/main/java/edu/harvard/iq/dataverse/ControlledVocabularyValueConverter.java
+++ b/src/main/java/edu/harvard/iq/dataverse/ControlledVocabularyValueConverter.java
@@ -5,13 +5,13 @@
  */
 package edu.harvard.iq.dataverse;
 
-import javax.ejb.EJB;
-import javax.enterprise.inject.spi.CDI;
+import jakarta.ejb.EJB;
+import jakarta.enterprise.inject.spi.CDI;
 
-import javax.faces.component.UIComponent;
-import javax.faces.context.FacesContext;
-import javax.faces.convert.Converter;
-import javax.faces.convert.FacesConverter;
+import jakarta.faces.component.UIComponent;
+import jakarta.faces.context.FacesContext;
+import jakarta.faces.convert.Converter;
+import jakarta.faces.convert.FacesConverter;
 
 /**
  *
diff --git a/src/main/java/edu/harvard/iq/dataverse/ControlledVocabularyValueServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/ControlledVocabularyValueServiceBean.java
index 0e9501414d0..4255c3b2dbc 100644
--- a/src/main/java/edu/harvard/iq/dataverse/ControlledVocabularyValueServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/ControlledVocabularyValueServiceBean.java
@@ -6,11 +6,11 @@
 package edu.harvard.iq.dataverse;
 
 import java.util.List;
-import javax.ejb.Stateless;
-import javax.inject.Named;
-import javax.persistence.EntityManager;
-import javax.persistence.PersistenceContext;
-import javax.persistence.TypedQuery;
+import jakarta.ejb.Stateless;
+import jakarta.inject.Named;
+import jakarta.persistence.EntityManager;
+import jakarta.persistence.PersistenceContext;
+import jakarta.persistence.TypedQuery;
 
 /**
  *
diff --git a/src/main/java/edu/harvard/iq/dataverse/CustomQuestion.java b/src/main/java/edu/harvard/iq/dataverse/CustomQuestion.java
index 64723fff79a..2cb6f27c3e4 100644
--- a/src/main/java/edu/harvard/iq/dataverse/CustomQuestion.java
+++ b/src/main/java/edu/harvard/iq/dataverse/CustomQuestion.java
@@ -1,7 +1,7 @@
 package edu.harvard.iq.dataverse;
 import java.io.Serializable;
 import java.util.List;
-import javax.persistence.*;
+import jakarta.persistence.*;
 import org.hibernate.validator.constraints.NotBlank;
 
 /**
diff --git a/src/main/java/edu/harvard/iq/dataverse/CustomQuestionResponse.java b/src/main/java/edu/harvard/iq/dataverse/CustomQuestionResponse.java
index 32af06014a7..f19ee3c3fc7 100644
--- a/src/main/java/edu/harvard/iq/dataverse/CustomQuestionResponse.java
+++ b/src/main/java/edu/harvard/iq/dataverse/CustomQuestionResponse.java
@@ -7,8 +7,8 @@
 
 import java.io.Serializable;
 import java.util.List;
-import javax.faces.model.SelectItem;
-import javax.persistence.*;
+import jakarta.faces.model.SelectItem;
+import jakarta.persistence.*;
 
 /**
  *
diff --git a/src/main/java/edu/harvard/iq/dataverse/CustomQuestionValue.java b/src/main/java/edu/harvard/iq/dataverse/CustomQuestionValue.java
index a5329c8b96d..f3a6b83b53f 100644
--- a/src/main/java/edu/harvard/iq/dataverse/CustomQuestionValue.java
+++ b/src/main/java/edu/harvard/iq/dataverse/CustomQuestionValue.java
@@ -1,7 +1,7 @@
 package edu.harvard.iq.dataverse;
 
 import java.io.Serializable;
-import javax.persistence.*;
+import jakarta.persistence.*;
 import org.hibernate.validator.constraints.NotBlank;
 
 /**
diff --git a/src/main/java/edu/harvard/iq/dataverse/CustomizationFilesServlet.java b/src/main/java/edu/harvard/iq/dataverse/CustomizationFilesServlet.java
index 713d365ba0f..9dd524127d7 100644
--- a/src/main/java/edu/harvard/iq/dataverse/CustomizationFilesServlet.java
+++ b/src/main/java/edu/harvard/iq/dataverse/CustomizationFilesServlet.java
@@ -14,13 +14,13 @@
 import java.io.PrintWriter;
 import java.nio.file.Path;
 import java.nio.file.Paths;
-import javax.servlet.ServletException;
-import javax.servlet.annotation.WebServlet;
-import javax.servlet.http.HttpServlet;
-import javax.servlet.http.HttpServletRequest;
-import javax.servlet.http.HttpServletResponse;
+import jakarta.servlet.ServletException;
+import jakarta.servlet.annotation.WebServlet;
+import jakarta.servlet.http.HttpServlet;
+import jakarta.servlet.http.HttpServletRequest;
+import jakarta.servlet.http.HttpServletResponse;
 import edu.harvard.iq.dataverse.settings.SettingsServiceBean;
-import javax.ejb.EJB;
+import jakarta.ejb.EJB;
 import org.apache.commons.io.IOUtils;
 
 /**
diff --git a/src/main/java/edu/harvard/iq/dataverse/DOIDataCiteRegisterCache.java b/src/main/java/edu/harvard/iq/dataverse/DOIDataCiteRegisterCache.java
index 7ccd4adb78f..7c75b1a4da6 100644
--- a/src/main/java/edu/harvard/iq/dataverse/DOIDataCiteRegisterCache.java
+++ b/src/main/java/edu/harvard/iq/dataverse/DOIDataCiteRegisterCache.java
@@ -7,14 +7,14 @@
 
 
 import java.io.Serializable;
-import javax.persistence.Column;
-import javax.persistence.Entity;
-import javax.persistence.GeneratedValue;
-import javax.persistence.GenerationType;
-import javax.persistence.Id;
-import javax.persistence.Lob;
-import javax.persistence.NamedQueries;
-import javax.persistence.NamedQuery;
+import jakarta.persistence.Column;
+import jakarta.persistence.Entity;
+import jakarta.persistence.GeneratedValue;
+import jakarta.persistence.GenerationType;
+import jakarta.persistence.Id;
+import jakarta.persistence.Lob;
+import jakarta.persistence.NamedQueries;
+import jakarta.persistence.NamedQuery;
 import org.hibernate.validator.constraints.NotBlank;
 
 /**
diff --git a/src/main/java/edu/harvard/iq/dataverse/DOIDataCiteRegisterService.java b/src/main/java/edu/harvard/iq/dataverse/DOIDataCiteRegisterService.java
index b748897dafe..9ecc4a3ecc9 100644
--- a/src/main/java/edu/harvard/iq/dataverse/DOIDataCiteRegisterService.java
+++ b/src/main/java/edu/harvard/iq/dataverse/DOIDataCiteRegisterService.java
@@ -18,11 +18,11 @@
 import java.util.Map;
 import java.util.logging.Level;
 import java.util.logging.Logger;
-import javax.ejb.EJB;
-import javax.ejb.Stateless;
-import javax.persistence.EntityManager;
-import javax.persistence.PersistenceContext;
-import javax.persistence.TypedQuery;
+import jakarta.ejb.EJB;
+import jakarta.ejb.Stateless;
+import jakarta.persistence.EntityManager;
+import jakarta.persistence.PersistenceContext;
+import jakarta.persistence.TypedQuery;
 
 import edu.harvard.iq.dataverse.settings.JvmSettings;
 import org.apache.commons.text.StringEscapeUtils;
diff --git a/src/main/java/edu/harvard/iq/dataverse/DOIDataCiteServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DOIDataCiteServiceBean.java
index fa0a745d80f..48786b41824 100644
--- a/src/main/java/edu/harvard/iq/dataverse/DOIDataCiteServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/DOIDataCiteServiceBean.java
@@ -3,7 +3,6 @@
 import java.io.IOException;
 import java.net.HttpURLConnection;
 import java.net.URL;
-import java.util.ArrayList;
 import java.util.Base64;
 import java.util.HashMap;
 import java.util.List;
@@ -11,8 +10,8 @@
 import java.util.logging.Level;
 import java.util.logging.Logger;
 
-import javax.ejb.EJB;
-import javax.ejb.Stateless;
+import jakarta.ejb.EJB;
+import jakarta.ejb.Stateless;
 
 import edu.harvard.iq.dataverse.settings.JvmSettings;
 import org.apache.commons.httpclient.HttpException;
diff --git a/src/main/java/edu/harvard/iq/dataverse/DOIEZIdServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DOIEZIdServiceBean.java
index d9b0fde15da..86b74b72f30 100644
--- a/src/main/java/edu/harvard/iq/dataverse/DOIEZIdServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/DOIEZIdServiceBean.java
@@ -7,7 +7,7 @@
 import java.util.logging.Level;
 import java.util.logging.Logger;
 
-import javax.ejb.Stateless;
+import jakarta.ejb.Stateless;
 
 /**
  *
diff --git a/src/main/java/edu/harvard/iq/dataverse/DashboardPage.java b/src/main/java/edu/harvard/iq/dataverse/DashboardPage.java
index 99c7951c96e..c37c3f52bc7 100644
--- a/src/main/java/edu/harvard/iq/dataverse/DashboardPage.java
+++ b/src/main/java/edu/harvard/iq/dataverse/DashboardPage.java
@@ -5,23 +5,21 @@
  */
 package edu.harvard.iq.dataverse;
 
-import edu.harvard.iq.dataverse.authorization.AuthenticationServiceBean;
 import edu.harvard.iq.dataverse.harvest.client.HarvestingClient;
 import edu.harvard.iq.dataverse.harvest.client.HarvestingClientServiceBean;
 import edu.harvard.iq.dataverse.harvest.server.OAISet;
 import edu.harvard.iq.dataverse.harvest.server.OAISetServiceBean;
-import static edu.harvard.iq.dataverse.util.JsfHelper.JH;
 
 import edu.harvard.iq.dataverse.util.BundleUtil;
 import edu.harvard.iq.dataverse.util.SystemConfig;
 import java.util.List;
 import java.util.logging.Logger;
-import javax.ejb.EJB;
-import javax.faces.application.FacesMessage;
-import javax.faces.context.FacesContext;
-import javax.faces.view.ViewScoped;
-import javax.inject.Inject;
-import javax.inject.Named;
+import jakarta.ejb.EJB;
+import jakarta.faces.application.FacesMessage;
+import jakarta.faces.context.FacesContext;
+import jakarta.faces.view.ViewScoped;
+import jakarta.inject.Inject;
+import jakarta.inject.Named;
 
 /**
  *
diff --git a/src/main/java/edu/harvard/iq/dataverse/DataCitation.java b/src/main/java/edu/harvard/iq/dataverse/DataCitation.java
index 30e03046822..9b4b89db44f 100644
--- a/src/main/java/edu/harvard/iq/dataverse/DataCitation.java
+++ b/src/main/java/edu/harvard/iq/dataverse/DataCitation.java
@@ -14,7 +14,6 @@
 import java.io.OutputStream;
 import java.io.OutputStreamWriter;
 import java.io.Writer;
-import java.text.ParseException;
 import java.text.SimpleDateFormat;
 import java.util.ArrayList;
 import java.util.Date;
@@ -27,7 +26,7 @@
 import java.util.regex.Pattern;
 import java.util.stream.Collectors;
 
-import javax.ejb.EJBException;
+import jakarta.ejb.EJBException;
 import javax.xml.stream.XMLOutputFactory;
 import javax.xml.stream.XMLStreamException;
 import javax.xml.stream.XMLStreamWriter;
diff --git a/src/main/java/edu/harvard/iq/dataverse/DataFile.java b/src/main/java/edu/harvard/iq/dataverse/DataFile.java
index 4e323496188..0f83ae3c5c8 100644
--- a/src/main/java/edu/harvard/iq/dataverse/DataFile.java
+++ b/src/main/java/edu/harvard/iq/dataverse/DataFile.java
@@ -29,10 +29,10 @@
 import java.util.Set;
 import java.util.logging.Logger;
 import java.util.stream.Collectors;
-import javax.json.Json;
-import javax.json.JsonArrayBuilder;
-import javax.persistence.*;
-import javax.validation.constraints.Pattern;
+import jakarta.json.Json;
+import jakarta.json.JsonArrayBuilder;
+import jakarta.persistence.*;
+import jakarta.validation.constraints.Pattern;
 import org.hibernate.validator.constraints.NotBlank;
 
 /**
diff --git a/src/main/java/edu/harvard/iq/dataverse/DataFileCategory.java b/src/main/java/edu/harvard/iq/dataverse/DataFileCategory.java
index f569a69b13a..f5abe9ac78a 100644
--- a/src/main/java/edu/harvard/iq/dataverse/DataFileCategory.java
+++ b/src/main/java/edu/harvard/iq/dataverse/DataFileCategory.java
@@ -10,16 +10,16 @@
 import java.io.Serializable;
 import java.util.ArrayList;
 import java.util.Collection;
-import javax.persistence.Column;
-import javax.persistence.Entity;
-import javax.persistence.GeneratedValue;
-import javax.persistence.GenerationType;
-import javax.persistence.Id;
-import javax.persistence.Index;
-import javax.persistence.JoinColumn;
-import javax.persistence.ManyToMany;
-import javax.persistence.ManyToOne;
-import javax.persistence.Table;
+import jakarta.persistence.Column;
+import jakarta.persistence.Entity;
+import jakarta.persistence.GeneratedValue;
+import jakarta.persistence.GenerationType;
+import jakarta.persistence.Id;
+import jakarta.persistence.Index;
+import jakarta.persistence.JoinColumn;
+import jakarta.persistence.ManyToMany;
+import jakarta.persistence.ManyToOne;
+import jakarta.persistence.Table;
 
 /**
  *
diff --git a/src/main/java/edu/harvard/iq/dataverse/DataFileCategoryServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DataFileCategoryServiceBean.java
index 3fa4691a6dd..29dcb22c3ec 100644
--- a/src/main/java/edu/harvard/iq/dataverse/DataFileCategoryServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/DataFileCategoryServiceBean.java
@@ -3,8 +3,8 @@
 import edu.harvard.iq.dataverse.settings.SettingsServiceBean;
 import edu.harvard.iq.dataverse.util.BundleUtil;
 
-import javax.ejb.EJB;
-import javax.ejb.Stateless;
+import jakarta.ejb.EJB;
+import jakarta.ejb.Stateless;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Collections;
diff --git a/src/main/java/edu/harvard/iq/dataverse/DataFileConverter.java b/src/main/java/edu/harvard/iq/dataverse/DataFileConverter.java
index 18531f5203d..701e826f12e 100644
--- a/src/main/java/edu/harvard/iq/dataverse/DataFileConverter.java
+++ b/src/main/java/edu/harvard/iq/dataverse/DataFileConverter.java
@@ -1,13 +1,13 @@
 package edu.harvard.iq.dataverse;
 
 import java.util.logging.Logger;
-import javax.ejb.EJB;
-import javax.enterprise.inject.spi.CDI;
+import jakarta.ejb.EJB;
+import jakarta.enterprise.inject.spi.CDI;
 
-import javax.faces.component.UIComponent;
-import javax.faces.context.FacesContext;
-import javax.faces.convert.Converter;
-import javax.faces.convert.FacesConverter;
+import jakarta.faces.component.UIComponent;
+import jakarta.faces.context.FacesContext;
+import jakarta.faces.convert.Converter;
+import jakarta.faces.convert.FacesConverter;
 
 @FacesConverter("dataFileConverter")
 public class DataFileConverter implements Converter {
diff --git a/src/main/java/edu/harvard/iq/dataverse/DataFileServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DataFileServiceBean.java
index c30bfce368a..98ee3351458 100644
--- a/src/main/java/edu/harvard/iq/dataverse/DataFileServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/DataFileServiceBean.java
@@ -21,16 +21,16 @@
 import java.util.UUID;
 import java.util.logging.Level;
 import java.util.logging.Logger;
-import javax.ejb.EJB;
-import javax.ejb.Stateless;
-import javax.ejb.TransactionAttribute;
-import javax.ejb.TransactionAttributeType;
-import javax.inject.Named;
-import javax.persistence.EntityManager;
-import javax.persistence.NoResultException;
-import javax.persistence.PersistenceContext;
-import javax.persistence.Query;
-import javax.persistence.TypedQuery;
+import jakarta.ejb.EJB;
+import jakarta.ejb.Stateless;
+import jakarta.ejb.TransactionAttribute;
+import jakarta.ejb.TransactionAttributeType;
+import jakarta.inject.Named;
+import jakarta.persistence.EntityManager;
+import jakarta.persistence.NoResultException;
+import jakarta.persistence.PersistenceContext;
+import jakarta.persistence.Query;
+import jakarta.persistence.TypedQuery;
 
 /**
  *
diff --git a/src/main/java/edu/harvard/iq/dataverse/DataFileTag.java b/src/main/java/edu/harvard/iq/dataverse/DataFileTag.java
index 275d47cf1de..f4f66d3c874 100644
--- a/src/main/java/edu/harvard/iq/dataverse/DataFileTag.java
+++ b/src/main/java/edu/harvard/iq/dataverse/DataFileTag.java
@@ -11,15 +11,15 @@
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
-import javax.persistence.Column;
-import javax.persistence.Entity;
-import javax.persistence.GeneratedValue;
-import javax.persistence.GenerationType;
-import javax.persistence.Id;
-import javax.persistence.Index;
-import javax.persistence.JoinColumn;
-import javax.persistence.ManyToOne;
-import javax.persistence.Table;
+import jakarta.persistence.Column;
+import jakarta.persistence.Entity;
+import jakarta.persistence.GeneratedValue;
+import jakarta.persistence.GenerationType;
+import jakarta.persistence.Id;
+import jakarta.persistence.Index;
+import jakarta.persistence.JoinColumn;
+import jakarta.persistence.ManyToOne;
+import jakarta.persistence.Table;
 import org.apache.commons.lang3.StringUtils;
 
 /**
diff --git a/src/main/java/edu/harvard/iq/dataverse/DataTable.java b/src/main/java/edu/harvard/iq/dataverse/DataTable.java
index 614e7394583..a17d8c65138 100644
--- a/src/main/java/edu/harvard/iq/dataverse/DataTable.java
+++ b/src/main/java/edu/harvard/iq/dataverse/DataTable.java
@@ -7,26 +7,23 @@
 package edu.harvard.iq.dataverse;
 
 import java.io.Serializable;
-import java.util.ArrayList;
 import java.util.List;
-import javax.persistence.CascadeType;
-import javax.persistence.Entity;
-import javax.persistence.GeneratedValue;
-import javax.persistence.GenerationType;
-import javax.persistence.Id;
-import javax.persistence.JoinColumn;
-import javax.persistence.ManyToOne;
-import javax.persistence.OneToMany;
-import javax.validation.constraints.Size;
-import javax.persistence.OrderBy;
-import org.hibernate.validator.constraints.NotBlank;
-import org.hibernate.validator.constraints.URL;
+import jakarta.persistence.CascadeType;
+import jakarta.persistence.Entity;
+import jakarta.persistence.GeneratedValue;
+import jakarta.persistence.GenerationType;
+import jakarta.persistence.Id;
+import jakarta.persistence.JoinColumn;
+import jakarta.persistence.ManyToOne;
+import jakarta.persistence.OneToMany;
+import jakarta.validation.constraints.Size;
+import jakarta.persistence.OrderBy;
 
 import edu.harvard.iq.dataverse.datavariable.DataVariable;
 import java.util.Objects;
-import javax.persistence.Column;
-import javax.persistence.Index;
-import javax.persistence.Table;
+import jakarta.persistence.Column;
+import jakarta.persistence.Index;
+import jakarta.persistence.Table;
 
 /**
  *
diff --git a/src/main/java/edu/harvard/iq/dataverse/DataTagsAPITestingBean.java b/src/main/java/edu/harvard/iq/dataverse/DataTagsAPITestingBean.java
index 2f987dde82b..713c86190fc 100644
--- a/src/main/java/edu/harvard/iq/dataverse/DataTagsAPITestingBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/DataTagsAPITestingBean.java
@@ -5,11 +5,11 @@
 import java.io.Serializable;
 import java.util.logging.Level;
 import java.util.logging.Logger;
-import javax.ejb.EJB;
-import javax.enterprise.context.SessionScoped;
-import javax.faces.context.FacesContext;
-import javax.inject.Named;
-import javax.json.JsonObject;
+import jakarta.ejb.EJB;
+import jakarta.enterprise.context.SessionScoped;
+import jakarta.faces.context.FacesContext;
+import jakarta.inject.Named;
+import jakarta.json.JsonObject;
 
 /**
  *
diff --git a/src/main/java/edu/harvard/iq/dataverse/DataTagsContainer.java b/src/main/java/edu/harvard/iq/dataverse/DataTagsContainer.java
index 5cf9c623bde..eeda70c1f17 100644
--- a/src/main/java/edu/harvard/iq/dataverse/DataTagsContainer.java
+++ b/src/main/java/edu/harvard/iq/dataverse/DataTagsContainer.java
@@ -1,7 +1,7 @@
 package edu.harvard.iq.dataverse;
 
-import javax.ejb.Stateless;
-import javax.json.JsonObject;
+import jakarta.ejb.Stateless;
+import jakarta.json.JsonObject;
 
 /**
  *
diff --git a/src/main/java/edu/harvard/iq/dataverse/Dataset.java b/src/main/java/edu/harvard/iq/dataverse/Dataset.java
index f9c839a0fff..620e66c6c54 100644
--- a/src/main/java/edu/harvard/iq/dataverse/Dataset.java
+++ b/src/main/java/edu/harvard/iq/dataverse/Dataset.java
@@ -17,22 +17,22 @@
 import java.util.List;
 import java.util.Objects;
 import java.util.Set;
-import javax.persistence.CascadeType;
-import javax.persistence.Entity;
-import javax.persistence.Index;
-import javax.persistence.JoinColumn;
-import javax.persistence.ManyToOne;
-import javax.persistence.NamedQueries;
-import javax.persistence.NamedQuery;
-import javax.persistence.NamedStoredProcedureQuery;
-import javax.persistence.OneToMany;
-import javax.persistence.OneToOne;
-import javax.persistence.OrderBy;
-import javax.persistence.ParameterMode;
-import javax.persistence.StoredProcedureParameter;
-import javax.persistence.Table;
-import javax.persistence.Temporal;
-import javax.persistence.TemporalType;
+import jakarta.persistence.CascadeType;
+import jakarta.persistence.Entity;
+import jakarta.persistence.Index;
+import jakarta.persistence.JoinColumn;
+import jakarta.persistence.ManyToOne;
+import jakarta.persistence.NamedQueries;
+import jakarta.persistence.NamedQuery;
+import jakarta.persistence.NamedStoredProcedureQuery;
+import jakarta.persistence.OneToMany;
+import jakarta.persistence.OneToOne;
+import jakarta.persistence.OrderBy;
+import jakarta.persistence.ParameterMode;
+import jakarta.persistence.StoredProcedureParameter;
+import jakarta.persistence.Table;
+import jakarta.persistence.Temporal;
+import jakarta.persistence.TemporalType;
 
 import edu.harvard.iq.dataverse.settings.JvmSettings;
 import edu.harvard.iq.dataverse.util.StringUtil;
diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetConverter.java b/src/main/java/edu/harvard/iq/dataverse/DatasetConverter.java
index 2d19cf5fe06..b779e084250 100644
--- a/src/main/java/edu/harvard/iq/dataverse/DatasetConverter.java
+++ b/src/main/java/edu/harvard/iq/dataverse/DatasetConverter.java
@@ -6,12 +6,12 @@
 
 package edu.harvard.iq.dataverse;
 
-import javax.ejb.EJB;
-import javax.enterprise.inject.spi.CDI;
-import javax.faces.component.UIComponent;
-import javax.faces.context.FacesContext;
-import javax.faces.convert.Converter;
-import javax.faces.convert.FacesConverter;
+import jakarta.ejb.EJB;
+import jakarta.enterprise.inject.spi.CDI;
+import jakarta.faces.component.UIComponent;
+import jakarta.faces.context.FacesContext;
+import jakarta.faces.convert.Converter;
+import jakarta.faces.convert.FacesConverter;
 
 @FacesConverter("datasetConverter")
 public class DatasetConverter implements Converter {
diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetDistributor.java b/src/main/java/edu/harvard/iq/dataverse/DatasetDistributor.java
index 00936b9365a..3252b7f0367 100644
--- a/src/main/java/edu/harvard/iq/dataverse/DatasetDistributor.java
+++ b/src/main/java/edu/harvard/iq/dataverse/DatasetDistributor.java
@@ -7,7 +7,7 @@
 package edu.harvard.iq.dataverse;
 
 import java.util.Comparator;
-import javax.persistence.Version;
+import jakarta.persistence.Version;
 
 /**
  *
diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetField.java b/src/main/java/edu/harvard/iq/dataverse/DatasetField.java
index 31d08f84c02..c836a20893f 100644
--- a/src/main/java/edu/harvard/iq/dataverse/DatasetField.java
+++ b/src/main/java/edu/harvard/iq/dataverse/DatasetField.java
@@ -19,20 +19,20 @@
 import java.util.LinkedList;
 import java.util.List;
 import java.util.Map;
-import javax.persistence.CascadeType;
-import javax.persistence.Entity;
-import javax.persistence.GeneratedValue;
-import javax.persistence.GenerationType;
-import javax.persistence.Id;
-import javax.persistence.Index;
-import javax.persistence.JoinColumn;
-import javax.persistence.JoinTable;
-import javax.persistence.ManyToMany;
-import javax.persistence.ManyToOne;
-import javax.persistence.OneToMany;
-import javax.persistence.OrderBy;
-import javax.persistence.Table;
-import javax.persistence.Transient;
+import jakarta.persistence.CascadeType;
+import jakarta.persistence.Entity;
+import jakarta.persistence.GeneratedValue;
+import jakarta.persistence.GenerationType;
+import jakarta.persistence.Id;
+import jakarta.persistence.Index;
+import jakarta.persistence.JoinColumn;
+import jakarta.persistence.JoinTable;
+import jakarta.persistence.ManyToMany;
+import jakarta.persistence.ManyToOne;
+import jakarta.persistence.OneToMany;
+import jakarta.persistence.OrderBy;
+import jakarta.persistence.Table;
+import jakarta.persistence.Transient;
 import org.apache.commons.lang3.StringUtils;
 
 @Entity
diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetFieldCompoundValue.java b/src/main/java/edu/harvard/iq/dataverse/DatasetFieldCompoundValue.java
index 5d83f1e4f8d..c679cd7edad 100644
--- a/src/main/java/edu/harvard/iq/dataverse/DatasetFieldCompoundValue.java
+++ b/src/main/java/edu/harvard/iq/dataverse/DatasetFieldCompoundValue.java
@@ -14,17 +14,17 @@
 import java.util.LinkedHashMap;
 import java.util.List;
 import java.util.Map;
-import javax.persistence.CascadeType;
-import javax.persistence.Entity;
-import javax.persistence.GeneratedValue;
-import javax.persistence.GenerationType;
-import javax.persistence.Id;
-import javax.persistence.Index;
-import javax.persistence.ManyToOne;
-import javax.persistence.OneToMany;
-import javax.persistence.OrderBy;
-import javax.persistence.Table;
-import javax.persistence.Transient;
+import jakarta.persistence.CascadeType;
+import jakarta.persistence.Entity;
+import jakarta.persistence.GeneratedValue;
+import jakarta.persistence.GenerationType;
+import jakarta.persistence.Id;
+import jakarta.persistence.Index;
+import jakarta.persistence.ManyToOne;
+import jakarta.persistence.OneToMany;
+import jakarta.persistence.OrderBy;
+import jakarta.persistence.Table;
+import jakarta.persistence.Transient;
 
 import org.apache.commons.lang3.StringUtils;
 import org.apache.commons.lang3.tuple.ImmutablePair;
diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetFieldConstant.java b/src/main/java/edu/harvard/iq/dataverse/DatasetFieldConstant.java
index e57a2a1538d..1621b80df55 100644
--- a/src/main/java/edu/harvard/iq/dataverse/DatasetFieldConstant.java
+++ b/src/main/java/edu/harvard/iq/dataverse/DatasetFieldConstant.java
@@ -6,8 +6,8 @@
 
 package edu.harvard.iq.dataverse;
 
-import javax.enterprise.context.Dependent;
-import javax.inject.Named;
+import jakarta.enterprise.context.Dependent;
+import jakarta.inject.Named;
 
 /**
  *
diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetFieldDefaultValue.java b/src/main/java/edu/harvard/iq/dataverse/DatasetFieldDefaultValue.java
index bad482dbca9..7746099818e 100644
--- a/src/main/java/edu/harvard/iq/dataverse/DatasetFieldDefaultValue.java
+++ b/src/main/java/edu/harvard/iq/dataverse/DatasetFieldDefaultValue.java
@@ -8,18 +8,18 @@
 
 import java.io.Serializable;
 import java.util.Collection;
-import javax.persistence.CascadeType;
-import javax.persistence.Column;
-import javax.persistence.Entity;
-import javax.persistence.GeneratedValue;
-import javax.persistence.GenerationType;
-import javax.persistence.Id;
-import javax.persistence.Index;
-import javax.persistence.JoinColumn;
-import javax.persistence.ManyToOne;
-import javax.persistence.OneToMany;
-import javax.persistence.OrderBy;
-import javax.persistence.Table;
+import jakarta.persistence.CascadeType;
+import jakarta.persistence.Column;
+import jakarta.persistence.Entity;
+import jakarta.persistence.GeneratedValue;
+import jakarta.persistence.GenerationType;
+import jakarta.persistence.Id;
+import jakarta.persistence.Index;
+import jakarta.persistence.JoinColumn;
+import jakarta.persistence.ManyToOne;
+import jakarta.persistence.OneToMany;
+import jakarta.persistence.OrderBy;
+import jakarta.persistence.Table;
 
 /**
  *
diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetFieldServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DatasetFieldServiceBean.java
index 89f8c11d076..620d4bf3e09 100644
--- a/src/main/java/edu/harvard/iq/dataverse/DatasetFieldServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/DatasetFieldServiceBean.java
@@ -17,24 +17,24 @@
 import java.util.Set;
 import java.util.logging.Logger;
 
-import javax.ejb.EJB;
-import javax.ejb.Stateless;
-import javax.inject.Named;
-import javax.json.Json;
-import javax.json.JsonArray;
-import javax.json.JsonArrayBuilder;
-import javax.json.JsonException;
-import javax.json.JsonObject;
-import javax.json.JsonObjectBuilder;
-import javax.json.JsonReader;
-import javax.json.JsonString;
-import javax.json.JsonValue;
-import javax.json.JsonValue.ValueType;
-import javax.persistence.EntityManager;
-import javax.persistence.NoResultException;
-import javax.persistence.NonUniqueResultException;
-import javax.persistence.PersistenceContext;
-import javax.persistence.TypedQuery;
+import jakarta.ejb.EJB;
+import jakarta.ejb.Stateless;
+import jakarta.inject.Named;
+import jakarta.json.Json;
+import jakarta.json.JsonArray;
+import jakarta.json.JsonArrayBuilder;
+import jakarta.json.JsonException;
+import jakarta.json.JsonObject;
+import jakarta.json.JsonObjectBuilder;
+import jakarta.json.JsonReader;
+import jakarta.json.JsonString;
+import jakarta.json.JsonValue;
+import jakarta.json.JsonValue.ValueType;
+import jakarta.persistence.EntityManager;
+import jakarta.persistence.NoResultException;
+import jakarta.persistence.NonUniqueResultException;
+import jakarta.persistence.PersistenceContext;
+import jakarta.persistence.TypedQuery;
 
 import org.apache.commons.codec.digest.DigestUtils;
 import org.apache.commons.httpclient.HttpException;
diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetFieldType.java b/src/main/java/edu/harvard/iq/dataverse/DatasetFieldType.java
index df126514308..824b486a42d 100644
--- a/src/main/java/edu/harvard/iq/dataverse/DatasetFieldType.java
+++ b/src/main/java/edu/harvard/iq/dataverse/DatasetFieldType.java
@@ -13,8 +13,8 @@
 import java.util.Set;
 import java.util.TreeMap;
 import java.util.MissingResourceException;
-import javax.faces.model.SelectItem;
-import javax.persistence.*;
+import jakarta.faces.model.SelectItem;
+import jakarta.persistence.*;
 
 /**
  * Defines the meaning and constraints of a metadata field and its values.
diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetFieldValidator.java b/src/main/java/edu/harvard/iq/dataverse/DatasetFieldValidator.java
index 3ded24d7a59..6d3fda2812d 100644
--- a/src/main/java/edu/harvard/iq/dataverse/DatasetFieldValidator.java
+++ b/src/main/java/edu/harvard/iq/dataverse/DatasetFieldValidator.java
@@ -5,11 +5,11 @@
  */
 package edu.harvard.iq.dataverse;
 
-import javax.validation.ConstraintValidator;
-import javax.validation.ConstraintValidatorContext;
+import jakarta.validation.ConstraintValidator;
+import jakarta.validation.ConstraintValidatorContext;
 
 import edu.harvard.iq.dataverse.util.BundleUtil;
-import java.util.Collections;
+
 import java.util.List;
 import java.util.regex.Matcher;
 import java.util.regex.Pattern;
diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetFieldValue.java b/src/main/java/edu/harvard/iq/dataverse/DatasetFieldValue.java
index 2447a6478fd..1064187ccd6 100644
--- a/src/main/java/edu/harvard/iq/dataverse/DatasetFieldValue.java
+++ b/src/main/java/edu/harvard/iq/dataverse/DatasetFieldValue.java
@@ -10,17 +10,17 @@
 import edu.harvard.iq.dataverse.util.MarkupChecker;
 import java.io.Serializable;
 import java.util.Comparator;
-import java.util.ResourceBundle;
-import javax.persistence.Column;
-import javax.persistence.Entity;
-import javax.persistence.GeneratedValue;
-import javax.persistence.GenerationType;
-import javax.persistence.Id;
-import javax.persistence.Index;
-import javax.persistence.JoinColumn;
-import javax.persistence.ManyToOne;
-import javax.persistence.Table;
-import javax.persistence.Transient;
+
+import jakarta.persistence.Column;
+import jakarta.persistence.Entity;
+import jakarta.persistence.GeneratedValue;
+import jakarta.persistence.GenerationType;
+import jakarta.persistence.Id;
+import jakarta.persistence.Index;
+import jakarta.persistence.JoinColumn;
+import jakarta.persistence.ManyToOne;
+import jakarta.persistence.Table;
+import jakarta.persistence.Transient;
 import org.apache.commons.lang3.StringUtils;
 
 /**
diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetFieldValueValidator.java b/src/main/java/edu/harvard/iq/dataverse/DatasetFieldValueValidator.java
index 132955859ff..b6c21014f04 100644
--- a/src/main/java/edu/harvard/iq/dataverse/DatasetFieldValueValidator.java
+++ b/src/main/java/edu/harvard/iq/dataverse/DatasetFieldValueValidator.java
@@ -13,14 +13,13 @@
 import java.util.GregorianCalendar;
 import java.util.logging.Logger;
 import java.util.regex.Pattern;
-import javax.validation.ConstraintValidator;
-import javax.validation.ConstraintValidatorContext;
+import jakarta.validation.ConstraintValidator;
+import jakarta.validation.ConstraintValidatorContext;
 
 import edu.harvard.iq.dataverse.util.BundleUtil;
 import edu.harvard.iq.dataverse.validation.EMailValidator;
 import edu.harvard.iq.dataverse.validation.URLValidator;
 import org.apache.commons.lang3.StringUtils;
-import org.apache.commons.validator.routines.UrlValidator;
 
 /**
  *
diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetLinkingDataverse.java b/src/main/java/edu/harvard/iq/dataverse/DatasetLinkingDataverse.java
index 8f8e9b103c1..dec07a09643 100644
--- a/src/main/java/edu/harvard/iq/dataverse/DatasetLinkingDataverse.java
+++ b/src/main/java/edu/harvard/iq/dataverse/DatasetLinkingDataverse.java
@@ -2,19 +2,19 @@
 
 import java.io.Serializable;
 import java.util.Date;
-import javax.persistence.Column;
-import javax.persistence.Entity;
-import javax.persistence.GeneratedValue;
-import javax.persistence.GenerationType;
-import javax.persistence.Id;
-import javax.persistence.Index;
-import javax.persistence.JoinColumn;
-import javax.persistence.NamedQueries;
-import javax.persistence.NamedQuery;
-import javax.persistence.OneToOne;
-import javax.persistence.Table;
-import javax.persistence.Temporal;
-import javax.persistence.TemporalType;
+import jakarta.persistence.Column;
+import jakarta.persistence.Entity;
+import jakarta.persistence.GeneratedValue;
+import jakarta.persistence.GenerationType;
+import jakarta.persistence.Id;
+import jakarta.persistence.Index;
+import jakarta.persistence.JoinColumn;
+import jakarta.persistence.NamedQueries;
+import jakarta.persistence.NamedQuery;
+import jakarta.persistence.OneToOne;
+import jakarta.persistence.Table;
+import jakarta.persistence.Temporal;
+import jakarta.persistence.TemporalType;
 
 /**
  *
diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetLinkingServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DatasetLinkingServiceBean.java
index 3789efcd443..39c82bfa3f1 100644
--- a/src/main/java/edu/harvard/iq/dataverse/DatasetLinkingServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/DatasetLinkingServiceBean.java
@@ -8,12 +8,13 @@
 import java.util.ArrayList;
 import java.util.List;
 import java.util.logging.Logger;
-import javax.ejb.Stateless;
-import javax.inject.Named;
-import javax.persistence.EntityManager;
-import javax.persistence.PersistenceContext;
-import javax.persistence.Query;
-import javax.persistence.TypedQuery;
+import jakarta.ejb.Stateless;
+import jakarta.inject.Named;
+import jakarta.persistence.EntityManager;
+import jakarta.persistence.NoResultException;
+import jakarta.persistence.PersistenceContext;
+import jakarta.persistence.Query;
+import jakarta.persistence.TypedQuery;
 
 /**
  *
@@ -63,7 +64,7 @@ public DatasetLinkingDataverse findDatasetLinkingDataverse(Long datasetId, Long
                 .setParameter("datasetId", datasetId)
                 .setParameter("linkingDataverseId", linkingDataverseId)
                 .getSingleResult();            
-        } catch (javax.persistence.NoResultException e) {
+        } catch (NoResultException e) {
             logger.fine("no datasetLinkingDataverse found for datasetId " + datasetId + " and linkingDataverseId " + linkingDataverseId);        
             return null;
         }
diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetLock.java b/src/main/java/edu/harvard/iq/dataverse/DatasetLock.java
index 7b857545c20..cc0078ecbc5 100644
--- a/src/main/java/edu/harvard/iq/dataverse/DatasetLock.java
+++ b/src/main/java/edu/harvard/iq/dataverse/DatasetLock.java
@@ -20,25 +20,24 @@
 
 package edu.harvard.iq.dataverse;
 
-import static edu.harvard.iq.dataverse.DatasetLock.Reason.Workflow;
 import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser;
 import java.util.Date;
 import java.io.Serializable;
-import javax.persistence.Column;
-import javax.persistence.Entity;
-import javax.persistence.EnumType;
-import javax.persistence.Enumerated;
-import javax.persistence.GeneratedValue;
-import javax.persistence.GenerationType;
-import javax.persistence.Id;
-import javax.persistence.Index;
-import javax.persistence.JoinColumn;
-import javax.persistence.ManyToOne;
-import javax.persistence.Table;
-import javax.persistence.Temporal;
-import javax.persistence.TemporalType;
-import javax.persistence.NamedQueries;
-import javax.persistence.NamedQuery;
+import jakarta.persistence.Column;
+import jakarta.persistence.Entity;
+import jakarta.persistence.EnumType;
+import jakarta.persistence.Enumerated;
+import jakarta.persistence.GeneratedValue;
+import jakarta.persistence.GenerationType;
+import jakarta.persistence.Id;
+import jakarta.persistence.Index;
+import jakarta.persistence.JoinColumn;
+import jakarta.persistence.ManyToOne;
+import jakarta.persistence.Table;
+import jakarta.persistence.Temporal;
+import jakarta.persistence.TemporalType;
+import jakarta.persistence.NamedQueries;
+import jakarta.persistence.NamedQuery;
 
 /**
  *
diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java b/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java
index 393c6cfad16..d20175b6e1a 100644
--- a/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java
+++ b/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java
@@ -58,7 +58,6 @@
 
 import edu.harvard.iq.dataverse.util.StringUtil;
 import edu.harvard.iq.dataverse.util.SystemConfig;
-import edu.harvard.iq.dataverse.util.URLTokenUtil;
 import edu.harvard.iq.dataverse.util.WebloaderUtil;
 import edu.harvard.iq.dataverse.validation.URLValidator;
 import edu.harvard.iq.dataverse.workflows.WorkflowComment;
@@ -84,27 +83,27 @@
 import java.util.logging.Logger;
 import java.util.stream.Collectors;
 
-import javax.ejb.EJB;
-import javax.ejb.EJBException;
-import javax.faces.application.FacesMessage;
-import javax.faces.context.FacesContext;
-import javax.faces.event.ActionEvent;
-import javax.faces.event.ValueChangeEvent;
-import javax.faces.view.ViewScoped;
-import javax.inject.Inject;
-import javax.inject.Named;
+import jakarta.ejb.EJB;
+import jakarta.ejb.EJBException;
+import jakarta.faces.application.FacesMessage;
+import jakarta.faces.context.FacesContext;
+import jakarta.faces.event.ActionEvent;
+import jakarta.faces.event.ValueChangeEvent;
+import jakarta.faces.view.ViewScoped;
+import jakarta.inject.Inject;
+import jakarta.inject.Named;
 
 import org.apache.commons.lang3.StringUtils;
 import org.primefaces.event.FileUploadEvent;
 import org.primefaces.model.file.UploadedFile;
 
-import javax.validation.ConstraintViolation;
+import jakarta.validation.ConstraintViolation;
 import org.apache.commons.httpclient.HttpClient;
 //import org.primefaces.context.RequestContext;
 import java.util.Arrays;
 import java.util.HashSet;
-import javax.faces.model.SelectItem;
-import javax.faces.validator.ValidatorException;
+import jakarta.faces.model.SelectItem;
+import jakarta.faces.validator.ValidatorException;
 
 import java.util.logging.Level;
 import edu.harvard.iq.dataverse.engine.command.exception.IllegalCommandException;
@@ -125,12 +124,12 @@
 import edu.harvard.iq.dataverse.makedatacount.MakeDataCountLoggingServiceBean;
 import edu.harvard.iq.dataverse.makedatacount.MakeDataCountLoggingServiceBean.MakeDataCountEntry;
 import java.util.Collections;
-import javax.faces.component.UIComponent;
-import javax.faces.component.UIInput;
+import jakarta.faces.component.UIComponent;
+import jakarta.faces.component.UIInput;
 
-import javax.faces.event.AjaxBehaviorEvent;
-import javax.servlet.ServletOutputStream;
-import javax.servlet.http.HttpServletResponse;
+import jakarta.faces.event.AjaxBehaviorEvent;
+import jakarta.servlet.ServletOutputStream;
+import jakarta.servlet.http.HttpServletResponse;
 
 import org.apache.commons.text.StringEscapeUtils;
 import org.apache.commons.lang3.mutable.MutableBoolean;
diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetRelMaterial.java b/src/main/java/edu/harvard/iq/dataverse/DatasetRelMaterial.java
index f432e4f5bbf..53ea62f566a 100644
--- a/src/main/java/edu/harvard/iq/dataverse/DatasetRelMaterial.java
+++ b/src/main/java/edu/harvard/iq/dataverse/DatasetRelMaterial.java
@@ -6,14 +6,14 @@
 
 package edu.harvard.iq.dataverse;
 
-import javax.persistence.Column;
-import javax.persistence.Entity;
-import javax.persistence.GeneratedValue;
-import javax.persistence.GenerationType;
-import javax.persistence.Id;
-import javax.persistence.JoinColumn;
-import javax.persistence.ManyToOne;
-import javax.persistence.Version;
+import jakarta.persistence.Column;
+import jakarta.persistence.Entity;
+import jakarta.persistence.GeneratedValue;
+import jakarta.persistence.GenerationType;
+import jakarta.persistence.Id;
+import jakarta.persistence.JoinColumn;
+import jakarta.persistence.ManyToOne;
+import jakarta.persistence.Version;
 
 /**
  *
diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DatasetServiceBean.java
index c93236f347b..52eb5868c35 100644
--- a/src/main/java/edu/harvard/iq/dataverse/DatasetServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/DatasetServiceBean.java
@@ -31,22 +31,21 @@
 import java.util.logging.FileHandler;
 import java.util.logging.Level;
 import java.util.logging.Logger;
-import javax.ejb.Asynchronous;
-import javax.ejb.EJB;
-import javax.ejb.EJBException;
-import javax.ejb.Stateless;
-import javax.ejb.TransactionAttribute;
-import javax.ejb.TransactionAttributeType;
-import javax.inject.Named;
-import javax.persistence.EntityManager;
-import javax.persistence.LockModeType;
-import javax.persistence.NoResultException;
-import javax.persistence.PersistenceContext;
-import javax.persistence.Query;
-import javax.persistence.StoredProcedureQuery;
-import javax.persistence.TypedQuery;
-import org.apache.commons.lang3.RandomStringUtils;
-import org.ocpsoft.common.util.Strings;
+import jakarta.ejb.Asynchronous;
+import jakarta.ejb.EJB;
+import jakarta.ejb.EJBException;
+import jakarta.ejb.Stateless;
+import jakarta.ejb.TransactionAttribute;
+import jakarta.ejb.TransactionAttributeType;
+import jakarta.inject.Named;
+import jakarta.persistence.EntityManager;
+import jakarta.persistence.LockModeType;
+import jakarta.persistence.NoResultException;
+import jakarta.persistence.PersistenceContext;
+import jakarta.persistence.Query;
+import jakarta.persistence.StoredProcedureQuery;
+import jakarta.persistence.TypedQuery;
+import org.apache.commons.lang3.StringUtils;
 
 /**
  *
@@ -119,7 +118,7 @@ public Dataset find(Object pk) {
     public Dataset findDeep(Object pk) {
         return (Dataset) em.createNamedQuery("Dataset.findById")
             .setParameter("id", pk)
-            // Optimization hints: retrieve all data in one query; this prevents point queries when iterating over the files 
+            // Optimization hints: retrieve all data in one query; this prevents point queries when iterating over the files
             .setHint("eclipselink.left-join-fetch", "o.files.ingestRequest")
             .setHint("eclipselink.left-join-fetch", "o.files.thumbnailForDataset")
             .setHint("eclipselink.left-join-fetch", "o.files.dataTables")
@@ -331,7 +330,7 @@ public Dataset findByGlobalId(String globalId) {
      * in the dataset components, a ConstraintViolationException will be thrown,
      * which can be further parsed to detect the specific offending values.
      * @param id the id of the dataset
-     * @throws javax.validation.ConstraintViolationException
+     * @throws ConstraintViolationException
      */
 
     @TransactionAttribute(TransactionAttributeType.REQUIRES_NEW)
@@ -399,7 +398,7 @@ public DatasetVersionUser getDatasetVersionUser(DatasetVersion version, User use
         query.setParameter("userId", au.getId());
         try {
             return query.getSingleResult();
-        } catch (javax.persistence.NoResultException e) {
+        } catch (NoResultException e) {
             return null;
         }
     }
@@ -514,7 +513,7 @@ public List<DatasetLock> listLocks(DatasetLock.Reason lockType, AuthenticatedUse
         }
         try {
             return query.getResultList();
-        } catch (javax.persistence.NoResultException e) {
+        } catch (NoResultException e) {
             return null;
         }
     }
@@ -595,7 +594,7 @@ public Map<Long, String> getArchiveDescriptionsForHarvestedDatasets(Set<Long> da
             return null;
         }
 
-        String datasetIdStr = Strings.join(datasetIds, ", ");
+        String datasetIdStr = StringUtils.join(datasetIds, ", ");
 
         String qstr = "SELECT d.id, h.archiveDescription FROM harvestingClient h, dataset d WHERE d.harvestingClient_id = h.id AND d.id IN (" + datasetIdStr + ")";
         List<Object[]> searchResults;
@@ -771,11 +770,11 @@ public void reExportDatasetAsync(Dataset dataset) {
 
     public void exportDataset(Dataset dataset, boolean forceReExport) {
         if (dataset != null) {
-            // Note that the logic for handling a dataset is similar to what is implemented in exportAllDatasets, 
+            // Note that the logic for handling a dataset is similar to what is implemented in exportAllDatasets,
             // but when only one dataset is exported we do not log in a separate export logging file
             if (dataset.isReleased() && dataset.getReleasedVersion() != null && !dataset.isDeaccessioned()) {
 
-                // can't trust dataset.getPublicationDate(), no. 
+                // can't trust dataset.getPublicationDate(), no.
                 Date publicationDate = dataset.getReleasedVersion().getReleaseTime(); // we know this dataset has a non-null released version! Maybe not - SEK 8/19 (We do now! :)
                 if (forceReExport || (publicationDate != null
                         && (dataset.getLastExportTime() == null
diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetTopicClass.java b/src/main/java/edu/harvard/iq/dataverse/DatasetTopicClass.java
index f253e1810a1..91a4ff3cf5a 100644
--- a/src/main/java/edu/harvard/iq/dataverse/DatasetTopicClass.java
+++ b/src/main/java/edu/harvard/iq/dataverse/DatasetTopicClass.java
@@ -6,8 +6,8 @@
 
 package edu.harvard.iq.dataverse;
 
-import javax.persistence.Column;
-import javax.persistence.Version;
+import jakarta.persistence.Column;
+import jakarta.persistence.Version;
 
 /**
  *
diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetVersion.java b/src/main/java/edu/harvard/iq/dataverse/DatasetVersion.java
index 9d5c27ae9fd..93f45bd288e 100644
--- a/src/main/java/edu/harvard/iq/dataverse/DatasetVersion.java
+++ b/src/main/java/edu/harvard/iq/dataverse/DatasetVersion.java
@@ -25,37 +25,37 @@
 import java.util.logging.Logger;
 import java.util.stream.Collectors;
 
-import javax.json.Json;
-import javax.json.JsonArray;
-import javax.json.JsonArrayBuilder;
-import javax.json.JsonObject;
-import javax.json.JsonObjectBuilder;
-import javax.persistence.CascadeType;
-import javax.persistence.Column;
-import javax.persistence.Entity;
-import javax.persistence.EnumType;
-import javax.persistence.Enumerated;
-import javax.persistence.GeneratedValue;
-import javax.persistence.GenerationType;
-import javax.persistence.Id;
-import javax.persistence.Index;
-import javax.persistence.JoinColumn;
-import javax.persistence.ManyToOne;
-import javax.persistence.NamedQueries;
-import javax.persistence.NamedQuery;
-import javax.persistence.OneToMany;
-import javax.persistence.OneToOne;
-import javax.persistence.OrderBy;
-import javax.persistence.Table;
-import javax.persistence.Temporal;
-import javax.persistence.TemporalType;
-import javax.persistence.Transient;
-import javax.persistence.UniqueConstraint;
-import javax.persistence.Version;
-import javax.validation.ConstraintViolation;
-import javax.validation.Validation;
-import javax.validation.Validator;
-import javax.validation.constraints.Size;
+import jakarta.json.Json;
+import jakarta.json.JsonArray;
+import jakarta.json.JsonArrayBuilder;
+import jakarta.json.JsonObject;
+import jakarta.json.JsonObjectBuilder;
+import jakarta.persistence.CascadeType;
+import jakarta.persistence.Column;
+import jakarta.persistence.Entity;
+import jakarta.persistence.EnumType;
+import jakarta.persistence.Enumerated;
+import jakarta.persistence.GeneratedValue;
+import jakarta.persistence.GenerationType;
+import jakarta.persistence.Id;
+import jakarta.persistence.Index;
+import jakarta.persistence.JoinColumn;
+import jakarta.persistence.ManyToOne;
+import jakarta.persistence.NamedQueries;
+import jakarta.persistence.NamedQuery;
+import jakarta.persistence.OneToMany;
+import jakarta.persistence.OneToOne;
+import jakarta.persistence.OrderBy;
+import jakarta.persistence.Table;
+import jakarta.persistence.Temporal;
+import jakarta.persistence.TemporalType;
+import jakarta.persistence.Transient;
+import jakarta.persistence.UniqueConstraint;
+import jakarta.persistence.Version;
+import jakarta.validation.ConstraintViolation;
+import jakarta.validation.Validation;
+import jakarta.validation.Validator;
+import jakarta.validation.constraints.Size;
 import org.apache.commons.lang3.StringUtils;
 
 /**
@@ -1814,7 +1814,7 @@ public String getPublicationDateAsString() {
     // So something will need to be modified to accommodate this. -- L.A.  
     /**
      * We call the export format "Schema.org JSON-LD" and extensive Javadoc can
-     * be found in {@link SchemaDotOrgExporter}.
+     * be found in {@link edu.harvard.iq.dataverse.export.SchemaDotOrgExporter}.
      */
     public String getJsonLd() {
         // We show published datasets only for "datePublished" field below.
diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetVersionConverter.java b/src/main/java/edu/harvard/iq/dataverse/DatasetVersionConverter.java
index 98f0d707bdc..b670fb18afc 100644
--- a/src/main/java/edu/harvard/iq/dataverse/DatasetVersionConverter.java
+++ b/src/main/java/edu/harvard/iq/dataverse/DatasetVersionConverter.java
@@ -5,12 +5,12 @@
  */
 package edu.harvard.iq.dataverse;
 
-import javax.ejb.EJB;
-import javax.enterprise.inject.spi.CDI;
-import javax.faces.component.UIComponent;
-import javax.faces.context.FacesContext;
-import javax.faces.convert.Converter;
-import javax.faces.convert.FacesConverter;
+import jakarta.ejb.EJB;
+import jakarta.enterprise.inject.spi.CDI;
+import jakarta.faces.component.UIComponent;
+import jakarta.faces.context.FacesContext;
+import jakarta.faces.convert.Converter;
+import jakarta.faces.convert.FacesConverter;
 
 /**
  *
diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetVersionNoteValidator.java b/src/main/java/edu/harvard/iq/dataverse/DatasetVersionNoteValidator.java
index c086fed3b10..a5ea487a68f 100644
--- a/src/main/java/edu/harvard/iq/dataverse/DatasetVersionNoteValidator.java
+++ b/src/main/java/edu/harvard/iq/dataverse/DatasetVersionNoteValidator.java
@@ -6,8 +6,8 @@
 package edu.harvard.iq.dataverse;
 
 import edu.harvard.iq.dataverse.util.BundleUtil;
-import javax.validation.ConstraintValidator;
-import javax.validation.ConstraintValidatorContext;
+import jakarta.validation.ConstraintValidator;
+import jakarta.validation.ConstraintValidatorContext;
 
 /**
  *
diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetVersionServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DatasetVersionServiceBean.java
index 607c46d3662..28243c37eee 100644
--- a/src/main/java/edu/harvard/iq/dataverse/DatasetVersionServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/DatasetVersionServiceBean.java
@@ -13,7 +13,7 @@
 import edu.harvard.iq.dataverse.util.BundleUtil;
 import edu.harvard.iq.dataverse.util.MarkupChecker;
 import edu.harvard.iq.dataverse.util.SystemConfig;
-import java.io.IOException;
+
 import java.text.SimpleDateFormat;
 import java.util.ArrayList;
 import java.util.Arrays;
@@ -22,22 +22,21 @@
 import java.util.HashMap;
 import java.util.Iterator;
 import java.util.List;
-import java.util.concurrent.Future;
 import java.util.logging.Level;
 import java.util.logging.Logger;
-import javax.ejb.EJB;
-import javax.ejb.EJBException;
-import javax.ejb.Stateless;
-import javax.inject.Named;
-import javax.json.Json;
-import javax.json.JsonObjectBuilder;
-import javax.persistence.EntityManager;
-import javax.persistence.PersistenceContext;
-import javax.persistence.Query;
-import javax.persistence.TypedQuery;
+import jakarta.ejb.EJB;
+import jakarta.ejb.EJBException;
+import jakarta.ejb.Stateless;
+import jakarta.inject.Named;
+import jakarta.json.Json;
+import jakarta.json.JsonObjectBuilder;
+import jakarta.persistence.EntityManager;
+import jakarta.persistence.NoResultException;
+import jakarta.persistence.PersistenceContext;
+import jakarta.persistence.Query;
+import jakarta.persistence.TypedQuery;
 import org.apache.commons.lang3.StringUtils;
-import org.apache.solr.client.solrj.SolrServerException;
-    
+
 /**
  *
  * @author skraffmiller
@@ -196,7 +195,7 @@ public DatasetVersion findByFriendlyVersionNumber(Long datasetId, String friendl
                 query.setParameter("majorVersionNumber", majorVersionNumber);
                 query.setParameter("minorVersionNumber", minorVersionNumber);
                 foundDatasetVersion = (DatasetVersion) query.getSingleResult();
-            } catch (javax.persistence.NoResultException e) {
+            } catch (NoResultException e) {
                 logger.warning("no ds version found: " + datasetId + " " + friendlyVersionNumber);
                 // DO nothing, just return null.
             }
@@ -224,7 +223,7 @@ public DatasetVersion findByFriendlyVersionNumber(Long datasetId, String friendl
                     }
                 }
                 return retVal;
-            } catch (javax.persistence.NoResultException e) {
+            } catch (NoResultException e) {
                 logger.warning("no ds version found: " + datasetId + " " + friendlyVersionNumber);
                 // DO nothing, just return null.
             }
@@ -451,7 +450,7 @@ private DatasetVersion getDatasetVersionByQuery(String queryString){
             msg("Found: " + ds);
             return ds;
             
-        } catch (javax.persistence.NoResultException e) {
+        } catch (NoResultException e) {
             msg("DatasetVersion not found: " + queryString);
             logger.log(Level.FINE, "DatasetVersion not found: {0}", queryString);
             return null;
@@ -1217,7 +1216,7 @@ public List<DatasetVersion> getUnarchivedDatasetVersions(){
         try {
             List<DatasetVersion> dsl = em.createNamedQuery("DatasetVersion.findUnarchivedReleasedVersion", DatasetVersion.class).getResultList();
             return dsl;
-        } catch (javax.persistence.NoResultException e) {
+        } catch (NoResultException e) {
             logger.log(Level.FINE, "No unarchived DatasetVersions found: {0}");
             return null;
         } catch (EJBException e) {
diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetVersionUI.java b/src/main/java/edu/harvard/iq/dataverse/DatasetVersionUI.java
index 6e9f9c17f7a..55b98c178bb 100644
--- a/src/main/java/edu/harvard/iq/dataverse/DatasetVersionUI.java
+++ b/src/main/java/edu/harvard/iq/dataverse/DatasetVersionUI.java
@@ -6,6 +6,7 @@
 package edu.harvard.iq.dataverse;
 
 import edu.harvard.iq.dataverse.util.MarkupChecker;
+
 import java.io.Serializable;
 import java.util.ArrayList;
 import java.util.Calendar;
@@ -14,12 +15,12 @@
 import java.util.Date;
 import java.util.List;
 import java.util.TreeMap;
-import javax.ejb.EJB;
-import javax.faces.view.ViewScoped;
-import javax.inject.Inject;
-import javax.json.JsonObject;
-import javax.persistence.EntityManager;
-import javax.persistence.PersistenceContext;
+
+import jakarta.ejb.EJB;
+import jakarta.faces.view.ViewScoped;
+import jakarta.inject.Inject;
+import jakarta.persistence.EntityManager;
+import jakarta.persistence.PersistenceContext;
 
 /**
  *
diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetVersionUser.java b/src/main/java/edu/harvard/iq/dataverse/DatasetVersionUser.java
index eda62a080f8..e56fad71253 100644
--- a/src/main/java/edu/harvard/iq/dataverse/DatasetVersionUser.java
+++ b/src/main/java/edu/harvard/iq/dataverse/DatasetVersionUser.java
@@ -4,20 +4,20 @@
 import edu.harvard.iq.dataverse.authorization.users.User;
 import java.io.Serializable;
 import java.sql.Timestamp;
-import javax.persistence.Column;
-import javax.persistence.Entity;
-import javax.persistence.GeneratedValue;
-import javax.persistence.GenerationType;
+import jakarta.persistence.Column;
+import jakarta.persistence.Entity;
+import jakarta.persistence.GeneratedValue;
+import jakarta.persistence.GenerationType;
 
-import javax.persistence.Id;
-import javax.persistence.Index;
+import jakarta.persistence.Id;
+import jakarta.persistence.Index;
 
-import javax.persistence.JoinColumn;
+import jakarta.persistence.JoinColumn;
 
-import javax.persistence.ManyToOne;
-import javax.persistence.NamedQueries;
-import javax.persistence.NamedQuery;
-import javax.persistence.Table;
+import jakarta.persistence.ManyToOne;
+import jakarta.persistence.NamedQueries;
+import jakarta.persistence.NamedQuery;
+import jakarta.persistence.Table;
 
 /**
  * Records the last time a {@link User} handled a {@link DatasetVersion}.
diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetWidgetsPage.java b/src/main/java/edu/harvard/iq/dataverse/DatasetWidgetsPage.java
index 9c47a58811a..1dd42903118 100644
--- a/src/main/java/edu/harvard/iq/dataverse/DatasetWidgetsPage.java
+++ b/src/main/java/edu/harvard/iq/dataverse/DatasetWidgetsPage.java
@@ -14,10 +14,10 @@
 import java.util.List;
 import java.util.logging.Level;
 import java.util.logging.Logger;
-import javax.ejb.EJB;
-import javax.faces.view.ViewScoped;
-import javax.inject.Inject;
-import javax.inject.Named;
+import jakarta.ejb.EJB;
+import jakarta.faces.view.ViewScoped;
+import jakarta.inject.Inject;
+import jakarta.inject.Named;
 import org.primefaces.event.FileUploadEvent;
 import org.primefaces.model.file.UploadedFile;
 
diff --git a/src/main/java/edu/harvard/iq/dataverse/Dataverse.java b/src/main/java/edu/harvard/iq/dataverse/Dataverse.java
index 50d5ae09548..682c1dc6744 100644
--- a/src/main/java/edu/harvard/iq/dataverse/Dataverse.java
+++ b/src/main/java/edu/harvard/iq/dataverse/Dataverse.java
@@ -2,7 +2,6 @@
 
 import edu.harvard.iq.dataverse.harvest.client.HarvestingClient;
 import edu.harvard.iq.dataverse.authorization.DataverseRole;
-import edu.harvard.iq.dataverse.dataaccess.DataAccess;
 import edu.harvard.iq.dataverse.search.savedsearch.SavedSearch;
 import edu.harvard.iq.dataverse.util.BundleUtil;
 import edu.harvard.iq.dataverse.util.SystemConfig;
@@ -13,29 +12,28 @@
 import java.util.List;
 import java.util.Objects;
 import java.util.Set;
-import javax.persistence.CascadeType;
-import javax.persistence.Column;
-import javax.persistence.Entity;
-import javax.persistence.EnumType;
-import javax.persistence.Enumerated;
-import javax.persistence.FetchType;
-import javax.persistence.Index;
-import javax.persistence.JoinColumn;
-import javax.persistence.JoinTable;
-import javax.persistence.ManyToMany;
-import javax.persistence.ManyToOne;
-import javax.persistence.NamedQueries;
-import javax.persistence.NamedQuery;
-import javax.persistence.OneToMany;
-import javax.persistence.OneToOne;
-import javax.persistence.OrderBy;
-import javax.persistence.Table;
-import javax.persistence.Transient;
-import javax.validation.constraints.NotNull;
-import javax.validation.constraints.Pattern;
-import javax.validation.constraints.Size;
-
-import org.apache.commons.lang3.StringUtils;
+import jakarta.persistence.CascadeType;
+import jakarta.persistence.Column;
+import jakarta.persistence.Entity;
+import jakarta.persistence.EnumType;
+import jakarta.persistence.Enumerated;
+import jakarta.persistence.FetchType;
+import jakarta.persistence.Index;
+import jakarta.persistence.JoinColumn;
+import jakarta.persistence.JoinTable;
+import jakarta.persistence.ManyToMany;
+import jakarta.persistence.ManyToOne;
+import jakarta.persistence.NamedQueries;
+import jakarta.persistence.NamedQuery;
+import jakarta.persistence.OneToMany;
+import jakarta.persistence.OneToOne;
+import jakarta.persistence.OrderBy;
+import jakarta.persistence.Table;
+import jakarta.persistence.Transient;
+import jakarta.validation.constraints.NotNull;
+import jakarta.validation.constraints.Pattern;
+import jakarta.validation.constraints.Size;
+
 import org.hibernate.validator.constraints.NotBlank;
 import org.hibernate.validator.constraints.NotEmpty;
 
diff --git a/src/main/java/edu/harvard/iq/dataverse/DataverseContact.java b/src/main/java/edu/harvard/iq/dataverse/DataverseContact.java
index 46021ddbc9b..d77767985eb 100644
--- a/src/main/java/edu/harvard/iq/dataverse/DataverseContact.java
+++ b/src/main/java/edu/harvard/iq/dataverse/DataverseContact.java
@@ -7,15 +7,15 @@
 
 import java.io.Serializable;
 import java.util.Objects;
-import javax.persistence.Column;
-import javax.persistence.Entity;
-import javax.persistence.GeneratedValue;
-import javax.persistence.GenerationType;
-import javax.persistence.Id;
-import javax.persistence.Index;
-import javax.persistence.JoinColumn;
-import javax.persistence.ManyToOne;
-import javax.persistence.Table;
+import jakarta.persistence.Column;
+import jakarta.persistence.Entity;
+import jakarta.persistence.GeneratedValue;
+import jakarta.persistence.GenerationType;
+import jakarta.persistence.Id;
+import jakarta.persistence.Index;
+import jakarta.persistence.JoinColumn;
+import jakarta.persistence.ManyToOne;
+import jakarta.persistence.Table;
 
 import edu.harvard.iq.dataverse.validation.ValidateEmail;
 import org.hibernate.validator.constraints.NotBlank;
diff --git a/src/main/java/edu/harvard/iq/dataverse/DataverseConverter.java b/src/main/java/edu/harvard/iq/dataverse/DataverseConverter.java
index 7d09c300dde..d802117043b 100644
--- a/src/main/java/edu/harvard/iq/dataverse/DataverseConverter.java
+++ b/src/main/java/edu/harvard/iq/dataverse/DataverseConverter.java
@@ -6,12 +6,12 @@
 
 package edu.harvard.iq.dataverse;
 
-import javax.ejb.EJB;
-import javax.enterprise.inject.spi.CDI;
-import javax.faces.component.UIComponent;
-import javax.faces.context.FacesContext;
-import javax.faces.convert.Converter;
-import javax.faces.convert.FacesConverter;
+import jakarta.ejb.EJB;
+import jakarta.enterprise.inject.spi.CDI;
+import jakarta.faces.component.UIComponent;
+import jakarta.faces.context.FacesContext;
+import jakarta.faces.convert.Converter;
+import jakarta.faces.convert.FacesConverter;
 
 /**
  *
diff --git a/src/main/java/edu/harvard/iq/dataverse/DataverseFacet.java b/src/main/java/edu/harvard/iq/dataverse/DataverseFacet.java
index bfd465b8f54..768c2308e50 100644
--- a/src/main/java/edu/harvard/iq/dataverse/DataverseFacet.java
+++ b/src/main/java/edu/harvard/iq/dataverse/DataverseFacet.java
@@ -8,16 +8,16 @@
 
 import java.io.Serializable;
 import java.util.Objects;
-import javax.persistence.Entity;
-import javax.persistence.GeneratedValue;
-import javax.persistence.GenerationType;
-import javax.persistence.Id;
-import javax.persistence.Index;
-import javax.persistence.JoinColumn;
-import javax.persistence.ManyToOne;
-import javax.persistence.NamedQueries;
-import javax.persistence.NamedQuery;
-import javax.persistence.Table;
+import jakarta.persistence.Entity;
+import jakarta.persistence.GeneratedValue;
+import jakarta.persistence.GenerationType;
+import jakarta.persistence.Id;
+import jakarta.persistence.Index;
+import jakarta.persistence.JoinColumn;
+import jakarta.persistence.ManyToOne;
+import jakarta.persistence.NamedQueries;
+import jakarta.persistence.NamedQuery;
+import jakarta.persistence.Table;
 
 /**
  *
diff --git a/src/main/java/edu/harvard/iq/dataverse/DataverseFacetServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DataverseFacetServiceBean.java
index 67bf6a820e2..5c77989f6d6 100644
--- a/src/main/java/edu/harvard/iq/dataverse/DataverseFacetServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/DataverseFacetServiceBean.java
@@ -2,11 +2,11 @@
 
 import edu.harvard.iq.dataverse.util.LruCache;
 import java.util.List;
-import javax.ejb.EJB;
-import javax.ejb.Stateless;
-import javax.inject.Named;
-import javax.persistence.EntityManager;
-import javax.persistence.PersistenceContext;
+import jakarta.ejb.EJB;
+import jakarta.ejb.Stateless;
+import jakarta.inject.Named;
+import jakarta.persistence.EntityManager;
+import jakarta.persistence.PersistenceContext;
 
 /**
  *
diff --git a/src/main/java/edu/harvard/iq/dataverse/DataverseFeaturedDataverse.java b/src/main/java/edu/harvard/iq/dataverse/DataverseFeaturedDataverse.java
index 662ee74c3bf..39ad6ca9520 100644
--- a/src/main/java/edu/harvard/iq/dataverse/DataverseFeaturedDataverse.java
+++ b/src/main/java/edu/harvard/iq/dataverse/DataverseFeaturedDataverse.java
@@ -2,16 +2,16 @@
 
 import java.io.Serializable;
 import java.util.Objects;
-import javax.persistence.Entity;
-import javax.persistence.GeneratedValue;
-import javax.persistence.GenerationType;
-import javax.persistence.Id;
-import javax.persistence.Index;
-import javax.persistence.JoinColumn;
-import javax.persistence.ManyToOne;
-import javax.persistence.NamedQueries;
-import javax.persistence.NamedQuery;
-import javax.persistence.Table;
+import jakarta.persistence.Entity;
+import jakarta.persistence.GeneratedValue;
+import jakarta.persistence.GenerationType;
+import jakarta.persistence.Id;
+import jakarta.persistence.Index;
+import jakarta.persistence.JoinColumn;
+import jakarta.persistence.ManyToOne;
+import jakarta.persistence.NamedQueries;
+import jakarta.persistence.NamedQuery;
+import jakarta.persistence.Table;
 
 /**
  *
diff --git a/src/main/java/edu/harvard/iq/dataverse/DataverseFieldTypeInputLevel.java b/src/main/java/edu/harvard/iq/dataverse/DataverseFieldTypeInputLevel.java
index 92b1ff7c2cf..c4749be0cb3 100644
--- a/src/main/java/edu/harvard/iq/dataverse/DataverseFieldTypeInputLevel.java
+++ b/src/main/java/edu/harvard/iq/dataverse/DataverseFieldTypeInputLevel.java
@@ -6,17 +6,17 @@
 package edu.harvard.iq.dataverse;
 
 import java.io.Serializable;
-import javax.persistence.Entity;
-import javax.persistence.GeneratedValue;
-import javax.persistence.GenerationType;
-import javax.persistence.Id;
-import javax.persistence.Index;
-import javax.persistence.JoinColumn;
-import javax.persistence.ManyToOne;
-import javax.persistence.NamedQueries;
-import javax.persistence.NamedQuery;
-import javax.persistence.Table;
-import javax.persistence.UniqueConstraint;
+import jakarta.persistence.Entity;
+import jakarta.persistence.GeneratedValue;
+import jakarta.persistence.GenerationType;
+import jakarta.persistence.Id;
+import jakarta.persistence.Index;
+import jakarta.persistence.JoinColumn;
+import jakarta.persistence.ManyToOne;
+import jakarta.persistence.NamedQueries;
+import jakarta.persistence.NamedQuery;
+import jakarta.persistence.Table;
+import jakarta.persistence.UniqueConstraint;
 
 /**
  *
diff --git a/src/main/java/edu/harvard/iq/dataverse/DataverseFieldTypeInputLevelServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DataverseFieldTypeInputLevelServiceBean.java
index 42a1290fdbd..66c700f59ce 100644
--- a/src/main/java/edu/harvard/iq/dataverse/DataverseFieldTypeInputLevelServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/DataverseFieldTypeInputLevelServiceBean.java
@@ -7,13 +7,13 @@
 
 import edu.harvard.iq.dataverse.util.LruCache;
 import java.util.List;
-import java.util.logging.Logger;
-import javax.ejb.Stateless;
-import javax.inject.Named;
-import javax.persistence.EntityManager;
-import javax.persistence.NoResultException;
-import javax.persistence.PersistenceContext;
-import javax.persistence.Query;
+
+import jakarta.ejb.Stateless;
+import jakarta.inject.Named;
+import jakarta.persistence.EntityManager;
+import jakarta.persistence.NoResultException;
+import jakarta.persistence.PersistenceContext;
+import jakarta.persistence.Query;
 
 /**
  *
diff --git a/src/main/java/edu/harvard/iq/dataverse/DataverseHeaderFragment.java b/src/main/java/edu/harvard/iq/dataverse/DataverseHeaderFragment.java
index 1e1353a11fc..389b85c19d9 100644
--- a/src/main/java/edu/harvard/iq/dataverse/DataverseHeaderFragment.java
+++ b/src/main/java/edu/harvard/iq/dataverse/DataverseHeaderFragment.java
@@ -19,11 +19,11 @@
 import java.util.List;
 import java.util.logging.Level;
 import java.util.logging.Logger;
-import javax.ejb.EJB;
-import javax.faces.context.FacesContext;
-import javax.faces.view.ViewScoped;
-import javax.inject.Inject;
-import javax.inject.Named;
+import jakarta.ejb.EJB;
+import jakarta.faces.context.FacesContext;
+import jakarta.faces.view.ViewScoped;
+import jakarta.inject.Inject;
+import jakarta.inject.Named;
 import org.apache.commons.lang3.StringUtils;
 
 /**
diff --git a/src/main/java/edu/harvard/iq/dataverse/DataverseLinkingDataverse.java b/src/main/java/edu/harvard/iq/dataverse/DataverseLinkingDataverse.java
index 788308dce1e..3030922ea5e 100644
--- a/src/main/java/edu/harvard/iq/dataverse/DataverseLinkingDataverse.java
+++ b/src/main/java/edu/harvard/iq/dataverse/DataverseLinkingDataverse.java
@@ -7,18 +7,18 @@
 
 import java.io.Serializable;
 import java.util.Date;
-import javax.persistence.Entity;
-import javax.persistence.GeneratedValue;
-import javax.persistence.GenerationType;
-import javax.persistence.Id;
-import javax.persistence.Index;
-import javax.persistence.JoinColumn;
-import javax.persistence.NamedQueries;
-import javax.persistence.NamedQuery;
-import javax.persistence.OneToOne;
-import javax.persistence.Table;
-import javax.persistence.Temporal;
-import javax.persistence.TemporalType;
+import jakarta.persistence.Entity;
+import jakarta.persistence.GeneratedValue;
+import jakarta.persistence.GenerationType;
+import jakarta.persistence.Id;
+import jakarta.persistence.Index;
+import jakarta.persistence.JoinColumn;
+import jakarta.persistence.NamedQueries;
+import jakarta.persistence.NamedQuery;
+import jakarta.persistence.OneToOne;
+import jakarta.persistence.Table;
+import jakarta.persistence.Temporal;
+import jakarta.persistence.TemporalType;
 
 /**
  *
diff --git a/src/main/java/edu/harvard/iq/dataverse/DataverseLinkingServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DataverseLinkingServiceBean.java
index c823deddb64..834ff96e392 100644
--- a/src/main/java/edu/harvard/iq/dataverse/DataverseLinkingServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/DataverseLinkingServiceBean.java
@@ -8,13 +8,13 @@
 import java.util.ArrayList;
 import java.util.List;
 import java.util.logging.Logger;
-import javax.ejb.EJB;
-import javax.ejb.Stateless;
-import javax.inject.Named;
-import javax.persistence.EntityManager;
-import javax.persistence.PersistenceContext;
-import javax.persistence.Query;
-import javax.persistence.TypedQuery;
+import jakarta.ejb.EJB;
+import jakarta.ejb.Stateless;
+import jakarta.inject.Named;
+import jakarta.persistence.EntityManager;
+import jakarta.persistence.PersistenceContext;
+import jakarta.persistence.Query;
+import jakarta.persistence.TypedQuery;
 
 /**
  *
@@ -66,7 +66,7 @@ public DataverseLinkingDataverse findDataverseLinkingDataverse(Long dataverseId,
                 .setParameter("dataverseId", dataverseId)
                 .setParameter("linkingDataverseId", linkingDataverseId)
                 .getSingleResult();
-        } catch (javax.persistence.NoResultException e) {
+        } catch (jakarta.persistence.NoResultException e) {
             logger.fine("No DataverseLinkingDataverse found for dataverseId " + dataverseId + " and linkedDataverseId " + linkingDataverseId);        
             return null;
         }
diff --git a/src/main/java/edu/harvard/iq/dataverse/DataverseMetadataBlockFacet.java b/src/main/java/edu/harvard/iq/dataverse/DataverseMetadataBlockFacet.java
index a2659b81974..c93144b2e97 100644
--- a/src/main/java/edu/harvard/iq/dataverse/DataverseMetadataBlockFacet.java
+++ b/src/main/java/edu/harvard/iq/dataverse/DataverseMetadataBlockFacet.java
@@ -1,13 +1,13 @@
 package edu.harvard.iq.dataverse;
 
-import javax.persistence.Entity;
-import javax.persistence.GeneratedValue;
-import javax.persistence.GenerationType;
-import javax.persistence.Id;
-import javax.persistence.Index;
-import javax.persistence.JoinColumn;
-import javax.persistence.ManyToOne;
-import javax.persistence.Table;
+import jakarta.persistence.Entity;
+import jakarta.persistence.GeneratedValue;
+import jakarta.persistence.GenerationType;
+import jakarta.persistence.Id;
+import jakarta.persistence.Index;
+import jakarta.persistence.JoinColumn;
+import jakarta.persistence.ManyToOne;
+import jakarta.persistence.Table;
 import java.io.Serializable;
 import java.util.Objects;
 
diff --git a/src/main/java/edu/harvard/iq/dataverse/DataversePage.java b/src/main/java/edu/harvard/iq/dataverse/DataversePage.java
index b48ff725e1e..daf33f444d9 100644
--- a/src/main/java/edu/harvard/iq/dataverse/DataversePage.java
+++ b/src/main/java/edu/harvard/iq/dataverse/DataversePage.java
@@ -28,27 +28,26 @@
 import static edu.harvard.iq.dataverse.util.JsfHelper.JH;
 import edu.harvard.iq.dataverse.util.SystemConfig;
 import java.util.List;
-import javax.ejb.EJB;
-import javax.faces.application.FacesMessage;
-import javax.faces.context.FacesContext;
-import javax.faces.view.ViewScoped;
-import javax.inject.Inject;
-import javax.inject.Named;
+import jakarta.ejb.EJB;
+import jakarta.faces.application.FacesMessage;
+import jakarta.faces.context.FacesContext;
+import jakarta.faces.view.ViewScoped;
+import jakarta.inject.Inject;
+import jakarta.inject.Named;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.HashMap;
-import java.util.HashSet;
 import java.util.Map;
 import java.util.Map.Entry;
 import java.util.Set;
 import java.util.logging.Level;
 import java.util.logging.Logger;
-import javax.faces.component.UIComponent;
-import javax.faces.component.UIInput;
+import jakarta.faces.component.UIComponent;
+import jakarta.faces.component.UIInput;
 import org.primefaces.model.DualListModel;
-import javax.ejb.EJBException;
-import javax.faces.event.ValueChangeEvent;
-import javax.faces.model.SelectItem;
+import jakarta.ejb.EJBException;
+import jakarta.faces.event.ValueChangeEvent;
+import jakarta.faces.model.SelectItem;
 import org.apache.commons.text.StringEscapeUtils;
 import org.apache.commons.lang3.StringUtils;
 import org.primefaces.PrimeFaces;
diff --git a/src/main/java/edu/harvard/iq/dataverse/DataverseRequestServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DataverseRequestServiceBean.java
index e193b535412..58a3837dbf9 100644
--- a/src/main/java/edu/harvard/iq/dataverse/DataverseRequestServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/DataverseRequestServiceBean.java
@@ -1,11 +1,11 @@
 package edu.harvard.iq.dataverse;
 
 import edu.harvard.iq.dataverse.engine.command.DataverseRequest;
-import javax.annotation.PostConstruct;
-import javax.enterprise.context.RequestScoped;
-import javax.inject.Inject;
-import javax.inject.Named;
-import javax.servlet.http.HttpServletRequest;
+import jakarta.annotation.PostConstruct;
+import jakarta.enterprise.context.RequestScoped;
+import jakarta.inject.Inject;
+import jakarta.inject.Named;
+import jakarta.servlet.http.HttpServletRequest;
 
 /**
  * The service bean to go to when one needs the current {@link DataverseRequest}.
diff --git a/src/main/java/edu/harvard/iq/dataverse/DataverseRoleServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DataverseRoleServiceBean.java
index b83593f5b6e..78d5eaf3414 100644
--- a/src/main/java/edu/harvard/iq/dataverse/DataverseRoleServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/DataverseRoleServiceBean.java
@@ -17,13 +17,13 @@
 import java.util.Set;
 import java.util.logging.Logger;
 import java.util.stream.Collectors;
-import javax.ejb.EJB;
-import javax.ejb.Stateless;
-import javax.inject.Named;
-import javax.persistence.EntityManager;
-import javax.persistence.PersistenceContext;
-import javax.persistence.TypedQuery;
-//import javax.validation.constraints.NotNull;
+import jakarta.ejb.EJB;
+import jakarta.ejb.Stateless;
+import jakarta.inject.Named;
+import jakarta.persistence.EntityManager;
+import jakarta.persistence.PersistenceContext;
+import jakarta.persistence.TypedQuery;
+//import jakarta.validation.constraints.NotNull;
 
 /**
  *
diff --git a/src/main/java/edu/harvard/iq/dataverse/DataverseServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DataverseServiceBean.java
index e092f209acd..7194a1ef31e 100644
--- a/src/main/java/edu/harvard/iq/dataverse/DataverseServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/DataverseServiceBean.java
@@ -30,18 +30,18 @@
 import java.util.Map;
 import java.util.logging.Logger;
 import java.util.Properties;
-import java.util.concurrent.Future;
-import javax.ejb.EJB;
-import javax.ejb.Stateless;
-import javax.inject.Inject;
-import javax.inject.Named;
-import javax.json.Json;
-import javax.json.JsonArrayBuilder;
-import javax.persistence.EntityManager;
-import javax.persistence.NoResultException;
-import javax.persistence.NonUniqueResultException;
-import javax.persistence.PersistenceContext;
-import javax.persistence.TypedQuery;
+
+import jakarta.ejb.EJB;
+import jakarta.ejb.Stateless;
+import jakarta.inject.Inject;
+import jakarta.inject.Named;
+import jakarta.json.Json;
+import jakarta.json.JsonArrayBuilder;
+import jakarta.persistence.EntityManager;
+import jakarta.persistence.NoResultException;
+import jakarta.persistence.NonUniqueResultException;
+import jakarta.persistence.PersistenceContext;
+import jakarta.persistence.TypedQuery;
 import org.apache.solr.client.solrj.SolrServerException;
 
 /**
diff --git a/src/main/java/edu/harvard/iq/dataverse/DataverseSession.java b/src/main/java/edu/harvard/iq/dataverse/DataverseSession.java
index c6016939c08..e8d76e1825e 100644
--- a/src/main/java/edu/harvard/iq/dataverse/DataverseSession.java
+++ b/src/main/java/edu/harvard/iq/dataverse/DataverseSession.java
@@ -18,13 +18,13 @@
 import java.util.List;
 import java.util.Locale;
 import java.util.logging.Logger;
-import javax.ejb.EJB;
-import javax.enterprise.context.SessionScoped;
-import javax.faces.context.FacesContext;
-import javax.inject.Inject;
-import javax.inject.Named;
-import javax.servlet.http.HttpServletRequest;
-import javax.servlet.http.HttpSession;
+import jakarta.ejb.EJB;
+import jakarta.enterprise.context.SessionScoped;
+import jakarta.faces.context.FacesContext;
+import jakarta.inject.Inject;
+import jakarta.inject.Named;
+import jakarta.servlet.http.HttpServletRequest;
+import jakarta.servlet.http.HttpSession;
 
 /**
  *
diff --git a/src/main/java/edu/harvard/iq/dataverse/DataverseTheme.java b/src/main/java/edu/harvard/iq/dataverse/DataverseTheme.java
index 0c6341db485..539669328a7 100644
--- a/src/main/java/edu/harvard/iq/dataverse/DataverseTheme.java
+++ b/src/main/java/edu/harvard/iq/dataverse/DataverseTheme.java
@@ -8,16 +8,16 @@
 
 import java.io.Serializable;
 import java.util.Objects;
-import javax.persistence.Entity;
-import javax.persistence.EnumType;
-import javax.persistence.Enumerated;
-import javax.persistence.GeneratedValue;
-import javax.persistence.GenerationType;
-import javax.persistence.Id;
-import javax.persistence.Index;
-import javax.persistence.JoinColumn;
-import javax.persistence.OneToOne;
-import javax.persistence.Table;
+import jakarta.persistence.Entity;
+import jakarta.persistence.EnumType;
+import jakarta.persistence.Enumerated;
+import jakarta.persistence.GeneratedValue;
+import jakarta.persistence.GenerationType;
+import jakarta.persistence.Id;
+import jakarta.persistence.Index;
+import jakarta.persistence.JoinColumn;
+import jakarta.persistence.OneToOne;
+import jakarta.persistence.Table;
 
 /**
  *
diff --git a/src/main/java/edu/harvard/iq/dataverse/DefaultValueSet.java b/src/main/java/edu/harvard/iq/dataverse/DefaultValueSet.java
index ad48f15fc54..a2dc785c470 100644
--- a/src/main/java/edu/harvard/iq/dataverse/DefaultValueSet.java
+++ b/src/main/java/edu/harvard/iq/dataverse/DefaultValueSet.java
@@ -8,13 +8,13 @@
 
 import java.io.Serializable;
 import java.util.List;
-import javax.persistence.CascadeType;
-import javax.persistence.Column;
-import javax.persistence.Entity;
-import javax.persistence.GeneratedValue;
-import javax.persistence.GenerationType;
-import javax.persistence.Id;
-import javax.persistence.OneToMany;
+import jakarta.persistence.CascadeType;
+import jakarta.persistence.Column;
+import jakarta.persistence.Entity;
+import jakarta.persistence.GeneratedValue;
+import jakarta.persistence.GenerationType;
+import jakarta.persistence.Id;
+import jakarta.persistence.OneToMany;
 
 /**
  *
diff --git a/src/main/java/edu/harvard/iq/dataverse/DvObject.java b/src/main/java/edu/harvard/iq/dataverse/DvObject.java
index e3013b8cf51..9e7f3f3fe96 100644
--- a/src/main/java/edu/harvard/iq/dataverse/DvObject.java
+++ b/src/main/java/edu/harvard/iq/dataverse/DvObject.java
@@ -5,14 +5,13 @@
 
 import java.sql.Timestamp;
 import java.text.SimpleDateFormat;
-import java.util.Arrays;
 import java.util.Date;
 import java.util.List;
 import java.util.Objects;
 import java.util.Set;
 import java.util.logging.Logger;
 
-import javax.persistence.*;
+import jakarta.persistence.*;
 
 /**
  * Base of the object hierarchy for "anything that can be inside a dataverse".
diff --git a/src/main/java/edu/harvard/iq/dataverse/DvObjectContainer.java b/src/main/java/edu/harvard/iq/dataverse/DvObjectContainer.java
index 6ff01ef3ea8..a322a25103e 100644
--- a/src/main/java/edu/harvard/iq/dataverse/DvObjectContainer.java
+++ b/src/main/java/edu/harvard/iq/dataverse/DvObjectContainer.java
@@ -2,8 +2,7 @@
 
 import edu.harvard.iq.dataverse.dataaccess.DataAccess;
 import edu.harvard.iq.dataverse.util.SystemConfig;
-import java.util.Locale;
-import javax.persistence.MappedSuperclass;
+import jakarta.persistence.MappedSuperclass;
 import org.apache.commons.lang3.StringUtils;
 
 /**
diff --git a/src/main/java/edu/harvard/iq/dataverse/DvObjectServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DvObjectServiceBean.java
index c9127af7c2b..d4219c36149 100644
--- a/src/main/java/edu/harvard/iq/dataverse/DvObjectServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/DvObjectServiceBean.java
@@ -12,19 +12,18 @@
 import java.util.Set;
 import java.util.logging.Level;
 import java.util.logging.Logger;
-import javax.ejb.Stateless;
-import javax.ejb.TransactionAttribute;
-import static javax.ejb.TransactionAttributeType.REQUIRES_NEW;
-import javax.inject.Named;
-import javax.persistence.EntityManager;
-import javax.persistence.NoResultException;
-import javax.persistence.NonUniqueResultException;
-import javax.persistence.PersistenceContext;
-import javax.persistence.Query;
-import javax.persistence.StoredProcedureQuery;
+import jakarta.ejb.Stateless;
+import jakarta.ejb.TransactionAttribute;
+import static jakarta.ejb.TransactionAttributeType.REQUIRES_NEW;
+import jakarta.inject.Named;
+import jakarta.persistence.EntityManager;
+import jakarta.persistence.NoResultException;
+import jakarta.persistence.NonUniqueResultException;
+import jakarta.persistence.PersistenceContext;
+import jakarta.persistence.Query;
+import jakarta.persistence.StoredProcedureQuery;
 
 import org.apache.commons.lang3.StringUtils;
-import org.ocpsoft.common.util.Strings;
 
 /**
  * Your goto bean for everything {@link DvObject}, that's not tied to any
@@ -134,7 +133,7 @@ private DvObject runFindByGlobalId(Query query, GlobalId gid, DvObject.DType dty
             query.setParameter("authority", gid.getAuthority());
             query.setParameter("dtype", dtype.getDType());
             foundDvObject = (DvObject) query.getSingleResult();
-        } catch (javax.persistence.NoResultException e) {
+        } catch (NoResultException e) {
             // (set to .info, this can fill the log file with thousands of
             // these messages during a large harvest run)
             logger.fine("no dvObject found: " + gid.asString());
@@ -155,7 +154,7 @@ private Long runFindIdByGlobalId(Query query, GlobalId gid, DvObject.DType dtype
             query.setParameter("authority", gid.getAuthority());
             query.setParameter("dtype", dtype.getDType());
             foundDvObject = (Long) query.getSingleResult();
-        } catch (javax.persistence.NoResultException e) {
+        } catch (NoResultException e) {
             // (set to .info, this can fill the log file with thousands of
             // these messages during a large harvest run)
             logger.fine("no dvObject found: " + gid.asString());
@@ -323,7 +322,7 @@ public Map<Long, String> getObjectPathsByIds(Set<Long> objectIds){
             return null;
         }
         
-        String datasetIdStr = Strings.join(objectIds, ", ");
+        String datasetIdStr = StringUtils.join(objectIds, ", ");
         
         String qstr = "WITH RECURSIVE path_elements AS ((" +
             " SELECT id, owner_id FROM dvobject WHERE id in (" + datasetIdStr + "))" +
diff --git a/src/main/java/edu/harvard/iq/dataverse/EditDataFilesPageHelper.java b/src/main/java/edu/harvard/iq/dataverse/EditDataFilesPageHelper.java
index 1bf6bee82eb..883baeedef4 100644
--- a/src/main/java/edu/harvard/iq/dataverse/EditDataFilesPageHelper.java
+++ b/src/main/java/edu/harvard/iq/dataverse/EditDataFilesPageHelper.java
@@ -4,8 +4,8 @@
 import edu.harvard.iq.dataverse.util.file.CreateDataFileResult;
 import org.apache.commons.text.StringEscapeUtils;
 
-import javax.ejb.Stateless;
-import javax.inject.Inject;
+import jakarta.ejb.Stateless;
+import jakarta.inject.Inject;
 import java.util.Arrays;
 import java.util.List;
 import java.util.Optional;
diff --git a/src/main/java/edu/harvard/iq/dataverse/EditDatafilesPage.java b/src/main/java/edu/harvard/iq/dataverse/EditDatafilesPage.java
index 1c033b37872..02a148f8cc5 100644
--- a/src/main/java/edu/harvard/iq/dataverse/EditDatafilesPage.java
+++ b/src/main/java/edu/harvard/iq/dataverse/EditDatafilesPage.java
@@ -39,7 +39,6 @@
 import edu.harvard.iq.dataverse.util.FileUtil;
 import edu.harvard.iq.dataverse.util.JsfHelper;
 import edu.harvard.iq.dataverse.util.SystemConfig;
-import edu.harvard.iq.dataverse.util.URLTokenUtil;
 import edu.harvard.iq.dataverse.util.WebloaderUtil;
 import edu.harvard.iq.dataverse.util.BundleUtil;
 import edu.harvard.iq.dataverse.util.EjbUtil;
@@ -58,23 +57,23 @@
 import java.util.Map;
 import java.util.Optional;
 import java.util.logging.Logger;
-import javax.ejb.EJB;
-import javax.ejb.EJBException;
-import javax.faces.application.FacesMessage;
-import javax.faces.context.FacesContext;
-import javax.faces.event.ActionEvent;
-import javax.faces.view.ViewScoped;
-import javax.inject.Inject;
-import javax.inject.Named;
+import jakarta.ejb.EJB;
+import jakarta.ejb.EJBException;
+import jakarta.faces.application.FacesMessage;
+import jakarta.faces.context.FacesContext;
+import jakarta.faces.event.ActionEvent;
+import jakarta.faces.view.ViewScoped;
+import jakarta.inject.Inject;
+import jakarta.inject.Named;
 
 import edu.harvard.iq.dataverse.util.file.CreateDataFileResult;
 import org.primefaces.event.FileUploadEvent;
 import org.primefaces.model.file.UploadedFile;
-import javax.json.Json;
-import javax.json.JsonObject;
-import javax.json.JsonObjectBuilder;
-import javax.json.JsonArray;
-import javax.json.JsonReader;
+import jakarta.json.Json;
+import jakarta.json.JsonObject;
+import jakarta.json.JsonObjectBuilder;
+import jakarta.json.JsonArray;
+import jakarta.json.JsonReader;
 import org.apache.commons.httpclient.HttpClient;
 import org.apache.commons.io.IOUtils;
 import org.apache.commons.httpclient.methods.GetMethod;
@@ -82,10 +81,10 @@
 import java.util.Collection;
 import java.util.Set;
 import java.util.logging.Level;
-import javax.faces.event.AjaxBehaviorEvent;
-import javax.faces.event.FacesEvent;
-import javax.servlet.ServletOutputStream;
-import javax.servlet.http.HttpServletResponse;
+import jakarta.faces.event.AjaxBehaviorEvent;
+import jakarta.faces.event.FacesEvent;
+import jakarta.servlet.ServletOutputStream;
+import jakarta.servlet.http.HttpServletResponse;
 import org.apache.commons.lang3.StringUtils;
 import org.apache.commons.lang3.mutable.MutableBoolean;
 import org.primefaces.PrimeFaces;
diff --git a/src/main/java/edu/harvard/iq/dataverse/EjbDataverseEngine.java b/src/main/java/edu/harvard/iq/dataverse/EjbDataverseEngine.java
index 7185887ecc3..bad8903c091 100644
--- a/src/main/java/edu/harvard/iq/dataverse/EjbDataverseEngine.java
+++ b/src/main/java/edu/harvard/iq/dataverse/EjbDataverseEngine.java
@@ -24,11 +24,10 @@
 import edu.harvard.iq.dataverse.search.IndexServiceBean;
 import edu.harvard.iq.dataverse.search.SearchServiceBean;
 import java.util.Map;
-import java.util.Map.Entry;
 import java.util.Set;
-import javax.ejb.EJB;
-import javax.ejb.Stateless;
-import javax.inject.Named;
+import jakarta.ejb.EJB;
+import jakarta.ejb.Stateless;
+import jakarta.inject.Named;
 import edu.harvard.iq.dataverse.search.SolrIndexServiceBean;
 import edu.harvard.iq.dataverse.search.savedsearch.SavedSearchServiceBean;
 import edu.harvard.iq.dataverse.settings.SettingsServiceBean;
@@ -41,16 +40,16 @@
 import java.util.Stack;
 import java.util.logging.Level;
 import java.util.logging.Logger;
-import javax.annotation.Resource;
-import javax.ejb.EJBContext;
-import javax.ejb.EJBException;
-import javax.ejb.TransactionAttribute;
-import static javax.ejb.TransactionAttributeType.REQUIRES_NEW;
-import static javax.ejb.TransactionAttributeType.SUPPORTS;
-import javax.persistence.EntityManager;
-import javax.persistence.PersistenceContext;
-import javax.validation.ConstraintViolation;
-import javax.validation.ConstraintViolationException;
+import jakarta.annotation.Resource;
+import jakarta.ejb.EJBContext;
+import jakarta.ejb.EJBException;
+import jakarta.ejb.TransactionAttribute;
+import static jakarta.ejb.TransactionAttributeType.REQUIRES_NEW;
+import static jakarta.ejb.TransactionAttributeType.SUPPORTS;
+import jakarta.persistence.EntityManager;
+import jakarta.persistence.PersistenceContext;
+import jakarta.validation.ConstraintViolation;
+import jakarta.validation.ConstraintViolationException;
 
 /**
  * An EJB capable of executing {@link Command}s in a JEE environment.
diff --git a/src/main/java/edu/harvard/iq/dataverse/EjbDataverseEngineInner.java b/src/main/java/edu/harvard/iq/dataverse/EjbDataverseEngineInner.java
index d8339dce856..891fe91dc66 100644
--- a/src/main/java/edu/harvard/iq/dataverse/EjbDataverseEngineInner.java
+++ b/src/main/java/edu/harvard/iq/dataverse/EjbDataverseEngineInner.java
@@ -4,13 +4,13 @@
 import edu.harvard.iq.dataverse.engine.command.Command;
 import edu.harvard.iq.dataverse.engine.command.CommandContext;
 import edu.harvard.iq.dataverse.engine.command.exception.CommandException;
-import javax.annotation.Resource;
-import javax.ejb.EJBContext;
-import javax.ejb.Stateless;
-import javax.ejb.TransactionAttribute;
-import static javax.ejb.TransactionAttributeType.REQUIRED;
+import jakarta.annotation.Resource;
+import jakarta.ejb.EJBContext;
+import jakarta.ejb.Stateless;
+import jakarta.ejb.TransactionAttribute;
+import static jakarta.ejb.TransactionAttributeType.REQUIRED;
 
-import javax.inject.Named;
+import jakarta.inject.Named;
 
 /**
  *
diff --git a/src/main/java/edu/harvard/iq/dataverse/Embargo.java b/src/main/java/edu/harvard/iq/dataverse/Embargo.java
index eac83edd296..29959b9f2d4 100644
--- a/src/main/java/edu/harvard/iq/dataverse/Embargo.java
+++ b/src/main/java/edu/harvard/iq/dataverse/Embargo.java
@@ -1,7 +1,7 @@
 package edu.harvard.iq.dataverse;
 
 
-import javax.persistence.*;
+import jakarta.persistence.*;
 
 import edu.harvard.iq.dataverse.util.BundleUtil;
 
diff --git a/src/main/java/edu/harvard/iq/dataverse/EmbargoServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/EmbargoServiceBean.java
index afbeab404c7..d0a8d214959 100644
--- a/src/main/java/edu/harvard/iq/dataverse/EmbargoServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/EmbargoServiceBean.java
@@ -3,12 +3,12 @@
 import edu.harvard.iq.dataverse.actionlogging.ActionLogRecord;
 import edu.harvard.iq.dataverse.actionlogging.ActionLogServiceBean;
 
-import javax.ejb.EJB;
-import javax.ejb.Stateless;
-import javax.inject.Named;
-import javax.persistence.EntityManager;
-import javax.persistence.PersistenceContext;
-import javax.persistence.Query;
+import jakarta.ejb.EJB;
+import jakarta.ejb.Stateless;
+import jakarta.inject.Named;
+import jakarta.persistence.EntityManager;
+import jakarta.persistence.PersistenceContext;
+import jakarta.persistence.Query;
 import java.util.List;
 
 /**
diff --git a/src/main/java/edu/harvard/iq/dataverse/ExternalVocabularyValue.java b/src/main/java/edu/harvard/iq/dataverse/ExternalVocabularyValue.java
index 3618da79630..7ebfa0302ac 100644
--- a/src/main/java/edu/harvard/iq/dataverse/ExternalVocabularyValue.java
+++ b/src/main/java/edu/harvard/iq/dataverse/ExternalVocabularyValue.java
@@ -9,13 +9,13 @@
 import java.io.Serializable;
 import java.sql.Timestamp;
 import java.util.Objects;
-import javax.persistence.Column;
-import javax.persistence.Entity;
-import javax.persistence.GeneratedValue;
-import javax.persistence.GenerationType;
-import javax.persistence.Id;
-import javax.persistence.Index;
-import javax.persistence.Table;
+import jakarta.persistence.Column;
+import jakarta.persistence.Entity;
+import jakarta.persistence.GeneratedValue;
+import jakarta.persistence.GenerationType;
+import jakarta.persistence.Id;
+import jakarta.persistence.Index;
+import jakarta.persistence.Table;
 
 /**
  *
diff --git a/src/main/java/edu/harvard/iq/dataverse/FacetConverter.java b/src/main/java/edu/harvard/iq/dataverse/FacetConverter.java
index 75ef62200bf..fd41315dbc0 100644
--- a/src/main/java/edu/harvard/iq/dataverse/FacetConverter.java
+++ b/src/main/java/edu/harvard/iq/dataverse/FacetConverter.java
@@ -5,13 +5,13 @@
  */
 package edu.harvard.iq.dataverse;
 
-import javax.ejb.EJB;
-import javax.enterprise.inject.spi.CDI;
+import jakarta.ejb.EJB;
+import jakarta.enterprise.inject.spi.CDI;
 
-import javax.faces.component.UIComponent;
-import javax.faces.context.FacesContext;
-import javax.faces.convert.Converter;
-import javax.faces.convert.FacesConverter;
+import jakarta.faces.component.UIComponent;
+import jakarta.faces.context.FacesContext;
+import jakarta.faces.convert.Converter;
+import jakarta.faces.convert.FacesConverter;
 
 /**
  *
diff --git a/src/main/java/edu/harvard/iq/dataverse/FeaturedDataverseServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/FeaturedDataverseServiceBean.java
index e7362587c36..d4d701cb02f 100644
--- a/src/main/java/edu/harvard/iq/dataverse/FeaturedDataverseServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/FeaturedDataverseServiceBean.java
@@ -9,11 +9,11 @@
 import java.util.ArrayList;
 import java.util.List;
 import java.util.logging.Logger;
-import javax.ejb.EJB;
-import javax.ejb.Stateless;
-import javax.inject.Named;
-import javax.persistence.EntityManager;
-import javax.persistence.PersistenceContext;
+import jakarta.ejb.EJB;
+import jakarta.ejb.Stateless;
+import jakarta.inject.Named;
+import jakarta.persistence.EntityManager;
+import jakarta.persistence.PersistenceContext;
 
 /**
  *
diff --git a/src/main/java/edu/harvard/iq/dataverse/FileAccessRequest.java b/src/main/java/edu/harvard/iq/dataverse/FileAccessRequest.java
index 76c5df4409a..6f68815c2ca 100644
--- a/src/main/java/edu/harvard/iq/dataverse/FileAccessRequest.java
+++ b/src/main/java/edu/harvard/iq/dataverse/FileAccessRequest.java
@@ -2,16 +2,16 @@
 
 import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser;
 
-import javax.persistence.Column;
-import javax.persistence.Embeddable;
-import javax.persistence.EmbeddedId;
-import javax.persistence.Entity;
-import javax.persistence.JoinColumn;
-import javax.persistence.ManyToOne;
-import javax.persistence.MapsId;
-import javax.persistence.Table;
-import javax.persistence.Temporal;
-import javax.persistence.TemporalType;
+import jakarta.persistence.Column;
+import jakarta.persistence.Embeddable;
+import jakarta.persistence.EmbeddedId;
+import jakarta.persistence.Entity;
+import jakarta.persistence.JoinColumn;
+import jakarta.persistence.ManyToOne;
+import jakarta.persistence.MapsId;
+import jakarta.persistence.Table;
+import jakarta.persistence.Temporal;
+import jakarta.persistence.TemporalType;
 import java.io.Serializable;
 import java.util.Date;
 
diff --git a/src/main/java/edu/harvard/iq/dataverse/FileDirectoryNameValidator.java b/src/main/java/edu/harvard/iq/dataverse/FileDirectoryNameValidator.java
index e0c2b83ab65..84c033afcaf 100644
--- a/src/main/java/edu/harvard/iq/dataverse/FileDirectoryNameValidator.java
+++ b/src/main/java/edu/harvard/iq/dataverse/FileDirectoryNameValidator.java
@@ -7,8 +7,8 @@
 
 import java.util.regex.Matcher;
 import java.util.regex.Pattern;
-import javax.validation.ConstraintValidator;
-import javax.validation.ConstraintValidatorContext;
+import jakarta.validation.ConstraintValidator;
+import jakarta.validation.ConstraintValidatorContext;
 
 /**
  *
diff --git a/src/main/java/edu/harvard/iq/dataverse/FileDownload.java b/src/main/java/edu/harvard/iq/dataverse/FileDownload.java
index fad03d2a0a1..a79281f71f0 100644
--- a/src/main/java/edu/harvard/iq/dataverse/FileDownload.java
+++ b/src/main/java/edu/harvard/iq/dataverse/FileDownload.java
@@ -6,18 +6,18 @@
 package edu.harvard.iq.dataverse;
 
 import java.io.Serializable;
-import javax.persistence.Entity;
-import javax.persistence.GeneratedValue;
-import javax.persistence.GenerationType;
-import javax.persistence.Id;
-import javax.persistence.Temporal;
-import javax.persistence.TemporalType;
-import javax.persistence.Transient;
-import javax.persistence.CascadeType;
-import javax.persistence.OneToOne;
-import javax.persistence.MapsId;
-import javax.persistence.FetchType;
-import javax.persistence.JoinColumn;
+import jakarta.persistence.Entity;
+import jakarta.persistence.GeneratedValue;
+import jakarta.persistence.GenerationType;
+import jakarta.persistence.Id;
+import jakarta.persistence.Temporal;
+import jakarta.persistence.TemporalType;
+import jakarta.persistence.Transient;
+import jakarta.persistence.CascadeType;
+import jakarta.persistence.OneToOne;
+import jakarta.persistence.MapsId;
+import jakarta.persistence.FetchType;
+import jakarta.persistence.JoinColumn;
 import java.util.Date;
 
 
diff --git a/src/main/java/edu/harvard/iq/dataverse/FileDownloadHelper.java b/src/main/java/edu/harvard/iq/dataverse/FileDownloadHelper.java
index 850efc2f1ae..c4b4978e0f8 100644
--- a/src/main/java/edu/harvard/iq/dataverse/FileDownloadHelper.java
+++ b/src/main/java/edu/harvard/iq/dataverse/FileDownloadHelper.java
@@ -18,10 +18,10 @@
 import java.util.List;
 import java.util.Map;
 import java.util.logging.Logger;
-import javax.ejb.EJB;
-import javax.faces.view.ViewScoped;
-import javax.inject.Inject;
-import javax.inject.Named;
+import jakarta.ejb.EJB;
+import jakarta.faces.view.ViewScoped;
+import jakarta.inject.Inject;
+import jakarta.inject.Named;
 import org.primefaces.PrimeFaces;
 //import org.primefaces.context.RequestContext;
 
diff --git a/src/main/java/edu/harvard/iq/dataverse/FileDownloadServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/FileDownloadServiceBean.java
index a90489be29a..e2b07717358 100644
--- a/src/main/java/edu/harvard/iq/dataverse/FileDownloadServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/FileDownloadServiceBean.java
@@ -18,7 +18,6 @@
 import edu.harvard.iq.dataverse.privateurl.PrivateUrl;
 import edu.harvard.iq.dataverse.privateurl.PrivateUrlServiceBean;
 import edu.harvard.iq.dataverse.settings.SettingsServiceBean;
-import edu.harvard.iq.dataverse.util.BundleUtil;
 import edu.harvard.iq.dataverse.util.FileUtil;
 import edu.harvard.iq.dataverse.util.StringUtil;
 import java.io.IOException;
@@ -29,16 +28,16 @@
 import java.util.List;
 import java.util.UUID;
 import java.util.logging.Logger;
-import javax.ejb.EJB;
-import javax.ejb.Stateless;
-import javax.faces.context.FacesContext;
-import javax.inject.Inject;
-import javax.inject.Named;
-import javax.persistence.EntityManager;
-import javax.persistence.PersistenceContext;
-import javax.persistence.Query;
-import javax.servlet.ServletOutputStream;
-import javax.servlet.http.HttpServletResponse;
+import jakarta.ejb.EJB;
+import jakarta.ejb.Stateless;
+import jakarta.faces.context.FacesContext;
+import jakarta.inject.Inject;
+import jakarta.inject.Named;
+import jakarta.persistence.EntityManager;
+import jakarta.persistence.PersistenceContext;
+import jakarta.persistence.Query;
+import jakarta.servlet.ServletOutputStream;
+import jakarta.servlet.http.HttpServletResponse;
 
 import org.primefaces.PrimeFaces;
 //import org.primefaces.context.RequestContext;
diff --git a/src/main/java/edu/harvard/iq/dataverse/FileMetadata.java b/src/main/java/edu/harvard/iq/dataverse/FileMetadata.java
index 01131bdca01..461c8b14e46 100644
--- a/src/main/java/edu/harvard/iq/dataverse/FileMetadata.java
+++ b/src/main/java/edu/harvard/iq/dataverse/FileMetadata.java
@@ -22,24 +22,24 @@
 import java.util.Map;
 import java.util.logging.Level;
 import java.util.logging.Logger;
-import javax.json.Json;
-import javax.json.JsonArrayBuilder;
-import javax.persistence.Column;
-import javax.persistence.Entity;
-import javax.persistence.GeneratedValue;
-import javax.persistence.GenerationType;
-import javax.persistence.CascadeType;
-import javax.persistence.Id;
-import javax.persistence.Index;
-import javax.persistence.JoinColumn;
-import javax.persistence.JoinTable;
-import javax.persistence.ManyToMany;
-import javax.persistence.ManyToOne;
-import javax.persistence.OneToMany;
-import javax.persistence.OrderBy;
-import javax.persistence.Table;
-import javax.persistence.Transient;
-import javax.persistence.Version;
+import jakarta.json.Json;
+import jakarta.json.JsonArrayBuilder;
+import jakarta.persistence.Column;
+import jakarta.persistence.Entity;
+import jakarta.persistence.GeneratedValue;
+import jakarta.persistence.GenerationType;
+import jakarta.persistence.CascadeType;
+import jakarta.persistence.Id;
+import jakarta.persistence.Index;
+import jakarta.persistence.JoinColumn;
+import jakarta.persistence.JoinTable;
+import jakarta.persistence.ManyToMany;
+import jakarta.persistence.ManyToOne;
+import jakarta.persistence.OneToMany;
+import jakarta.persistence.OrderBy;
+import jakarta.persistence.Table;
+import jakarta.persistence.Transient;
+import jakarta.persistence.Version;
 
 import edu.harvard.iq.dataverse.datavariable.CategoryMetadata;
 import edu.harvard.iq.dataverse.datavariable.DataVariable;
@@ -49,12 +49,12 @@
 import edu.harvard.iq.dataverse.util.StringUtil;
 import java.util.HashSet;
 import java.util.Set;
-import javax.validation.ConstraintViolation;
-import javax.validation.Validation;
-import javax.validation.Validator;
-import javax.validation.ValidatorFactory;
+import jakarta.validation.ConstraintViolation;
+import jakarta.validation.Validation;
+import jakarta.validation.Validator;
+import jakarta.validation.ValidatorFactory;
 import org.hibernate.validator.constraints.NotBlank;
-import javax.validation.constraints.Pattern;
+import jakarta.validation.constraints.Pattern;
 
 
 /**
diff --git a/src/main/java/edu/harvard/iq/dataverse/FilePage.java b/src/main/java/edu/harvard/iq/dataverse/FilePage.java
index bee5ce20339..49c904c3ac3 100644
--- a/src/main/java/edu/harvard/iq/dataverse/FilePage.java
+++ b/src/main/java/edu/harvard/iq/dataverse/FilePage.java
@@ -30,8 +30,6 @@
 import edu.harvard.iq.dataverse.externaltools.ExternalToolServiceBean;
 import edu.harvard.iq.dataverse.makedatacount.MakeDataCountLoggingServiceBean;
 import edu.harvard.iq.dataverse.makedatacount.MakeDataCountLoggingServiceBean.MakeDataCountEntry;
-import edu.harvard.iq.dataverse.makedatacount.MakeDataCountUtil;
-import edu.harvard.iq.dataverse.privateurl.PrivateUrl;
 import edu.harvard.iq.dataverse.privateurl.PrivateUrlServiceBean;
 import edu.harvard.iq.dataverse.settings.SettingsServiceBean;
 import edu.harvard.iq.dataverse.util.BundleUtil;
@@ -39,9 +37,8 @@
 import edu.harvard.iq.dataverse.util.JsfHelper;
 import static edu.harvard.iq.dataverse.util.JsfHelper.JH;
 import edu.harvard.iq.dataverse.util.SystemConfig;
-import edu.harvard.iq.dataverse.util.json.JsonUtil;
+
 import java.io.IOException;
-import java.time.format.DateTimeFormatter;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Collections;
@@ -49,19 +46,19 @@
 import java.util.List;
 import java.util.Set;
 import java.util.logging.Logger;
-import javax.ejb.EJB;
-import javax.ejb.EJBException;
-import javax.faces.application.FacesMessage;
-import javax.faces.component.UIComponent;
-import javax.faces.context.FacesContext;
-import javax.faces.validator.ValidatorException;
-import javax.faces.view.ViewScoped;
-import javax.inject.Inject;
-import javax.inject.Named;
-import javax.json.JsonArray;
-import javax.json.JsonObject;
-import javax.json.JsonValue;
-import javax.validation.ConstraintViolation;
+import jakarta.ejb.EJB;
+import jakarta.ejb.EJBException;
+import jakarta.faces.application.FacesMessage;
+import jakarta.faces.component.UIComponent;
+import jakarta.faces.context.FacesContext;
+import jakarta.faces.validator.ValidatorException;
+import jakarta.faces.view.ViewScoped;
+import jakarta.inject.Inject;
+import jakarta.inject.Named;
+import jakarta.json.JsonArray;
+import jakarta.json.JsonObject;
+import jakarta.json.JsonValue;
+import jakarta.validation.ConstraintViolation;
 
 import org.primefaces.PrimeFaces;
 import org.primefaces.component.tabview.TabView;
@@ -1249,4 +1246,13 @@ public boolean isHasPublicStore() {
         return settingsWrapper.isTrueForKey(SettingsServiceBean.Key.PublicInstall, StorageIO.isPublicStore(DataAccess.getStorageDriverFromIdentifier(file.getStorageIdentifier())));
     }
 
+    /**
+     * This method only exists because in file-edit-button-fragment.xhtml we
+     * call bean.editFileMetadata() and we need both FilePage (this bean) and
+     * DatasetPage to have the method defined to prevent errors in server.log.
+     */
+    public String editFileMetadata(){
+        return "";
+    }
+
 }
diff --git a/src/main/java/edu/harvard/iq/dataverse/FileUploadRenderer.java b/src/main/java/edu/harvard/iq/dataverse/FileUploadRenderer.java
index 5e73ef65f25..ce3b0d65875 100644
--- a/src/main/java/edu/harvard/iq/dataverse/FileUploadRenderer.java
+++ b/src/main/java/edu/harvard/iq/dataverse/FileUploadRenderer.java
@@ -6,8 +6,8 @@
 
 package edu.harvard.iq.dataverse;
 
-import javax.faces.component.UIComponent;
-import javax.faces.context.FacesContext;
+import jakarta.faces.component.UIComponent;
+import jakarta.faces.context.FacesContext;
 
 /**
  *
diff --git a/src/main/java/edu/harvard/iq/dataverse/ForeignMetadataFieldMapping.java b/src/main/java/edu/harvard/iq/dataverse/ForeignMetadataFieldMapping.java
index 40d219d2638..db83ab953a1 100644
--- a/src/main/java/edu/harvard/iq/dataverse/ForeignMetadataFieldMapping.java
+++ b/src/main/java/edu/harvard/iq/dataverse/ForeignMetadataFieldMapping.java
@@ -3,13 +3,8 @@
 package edu.harvard.iq.dataverse;
 
 import java.io.Serializable;
-import javax.persistence.*;
+import jakarta.persistence.*;
 import java.util.Collection;
-import java.util.LinkedList;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-import java.util.TreeMap;
 
 /**
  *
diff --git a/src/main/java/edu/harvard/iq/dataverse/ForeignMetadataFormatMapping.java b/src/main/java/edu/harvard/iq/dataverse/ForeignMetadataFormatMapping.java
index 0fac75257c8..eb7b97b1a84 100644
--- a/src/main/java/edu/harvard/iq/dataverse/ForeignMetadataFormatMapping.java
+++ b/src/main/java/edu/harvard/iq/dataverse/ForeignMetadataFormatMapping.java
@@ -7,18 +7,18 @@
 package edu.harvard.iq.dataverse;
 
 import java.io.Serializable;
-import javax.persistence.Entity;
-import javax.persistence.GeneratedValue;
-import javax.persistence.GenerationType;
-import javax.persistence.Id;
+import jakarta.persistence.Entity;
+import jakarta.persistence.GeneratedValue;
+import jakarta.persistence.GenerationType;
+import jakarta.persistence.Id;
 import java.util.List;
-import javax.persistence.CascadeType;
-import javax.persistence.Column;
-import javax.persistence.Index;
-import javax.persistence.NamedQueries;
-import javax.persistence.NamedQuery;
-import javax.persistence.OneToMany;
-import javax.persistence.Table;
+import jakarta.persistence.CascadeType;
+import jakarta.persistence.Column;
+import jakarta.persistence.Index;
+import jakarta.persistence.NamedQueries;
+import jakarta.persistence.NamedQuery;
+import jakarta.persistence.OneToMany;
+import jakarta.persistence.Table;
 
 /**
  *
diff --git a/src/main/java/edu/harvard/iq/dataverse/Guestbook.java b/src/main/java/edu/harvard/iq/dataverse/Guestbook.java
index 18913bfd5bf..2ef23d1f925 100644
--- a/src/main/java/edu/harvard/iq/dataverse/Guestbook.java
+++ b/src/main/java/edu/harvard/iq/dataverse/Guestbook.java
@@ -3,27 +3,25 @@
 
 import edu.harvard.iq.dataverse.util.BundleUtil;
 import java.io.Serializable;
-import java.text.SimpleDateFormat;
 import java.util.ArrayList;
 import java.util.Date;
-import javax.persistence.CascadeType;
-import javax.persistence.Entity;
-import javax.persistence.GeneratedValue;
-import javax.persistence.GenerationType;
-import javax.persistence.Id;
-import javax.persistence.JoinColumn;
-import javax.persistence.OneToMany;
+import jakarta.persistence.CascadeType;
+import jakarta.persistence.Entity;
+import jakarta.persistence.GeneratedValue;
+import jakarta.persistence.GenerationType;
+import jakarta.persistence.Id;
+import jakarta.persistence.JoinColumn;
+import jakarta.persistence.OneToMany;
 import java.util.List;
 import java.util.Objects;
-import javax.persistence.Column;
-import javax.persistence.ManyToOne;
-import javax.persistence.OrderBy;
-import javax.persistence.Temporal;
-import javax.persistence.TemporalType;
-import javax.persistence.Transient;
+import jakarta.persistence.Column;
+import jakarta.persistence.ManyToOne;
+import jakarta.persistence.OrderBy;
+import jakarta.persistence.Temporal;
+import jakarta.persistence.TemporalType;
+import jakarta.persistence.Transient;
 
 import edu.harvard.iq.dataverse.util.DateUtil;
-import org.apache.commons.text.StringEscapeUtils;
 import org.hibernate.validator.constraints.NotBlank;
 
 /**
diff --git a/src/main/java/edu/harvard/iq/dataverse/GuestbookPage.java b/src/main/java/edu/harvard/iq/dataverse/GuestbookPage.java
index 7cbb69e5c1d..9fb584a9133 100644
--- a/src/main/java/edu/harvard/iq/dataverse/GuestbookPage.java
+++ b/src/main/java/edu/harvard/iq/dataverse/GuestbookPage.java
@@ -17,13 +17,13 @@
 import java.util.Iterator;
 import java.util.List;
 import java.util.logging.Logger;
-import javax.ejb.EJB;
-import javax.ejb.EJBException;
-import javax.faces.application.FacesMessage;
-import javax.faces.context.FacesContext;
-import javax.faces.view.ViewScoped;
-import javax.inject.Inject;
-import javax.inject.Named;
+import jakarta.ejb.EJB;
+import jakarta.ejb.EJBException;
+import jakarta.faces.application.FacesMessage;
+import jakarta.faces.context.FacesContext;
+import jakarta.faces.view.ViewScoped;
+import jakarta.inject.Inject;
+import jakarta.inject.Named;
 import org.apache.commons.lang3.StringUtils;
 
 /**
diff --git a/src/main/java/edu/harvard/iq/dataverse/GuestbookResponse.java b/src/main/java/edu/harvard/iq/dataverse/GuestbookResponse.java
index 69404482fce..0057fbeddab 100644
--- a/src/main/java/edu/harvard/iq/dataverse/GuestbookResponse.java
+++ b/src/main/java/edu/harvard/iq/dataverse/GuestbookResponse.java
@@ -13,8 +13,8 @@
 import java.util.ArrayList;
 import java.util.Date;
 import java.util.List;
-import javax.persistence.*;
-import javax.validation.constraints.Size;
+import jakarta.persistence.*;
+import jakarta.validation.constraints.Size;
 
 /**
  *
diff --git a/src/main/java/edu/harvard/iq/dataverse/GuestbookResponseServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/GuestbookResponseServiceBean.java
index 2f795a4da74..bd598d2dca0 100644
--- a/src/main/java/edu/harvard/iq/dataverse/GuestbookResponseServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/GuestbookResponseServiceBean.java
@@ -21,17 +21,17 @@
 import java.util.List;
 import java.util.Map;
 import java.util.logging.Logger;
-import javax.ejb.EJB;
-import javax.ejb.Stateless;
-import javax.ejb.TransactionAttribute;
-import javax.ejb.TransactionAttributeType;
-import javax.faces.model.SelectItem;
-import javax.inject.Named;
-import javax.persistence.EntityManager;
-import javax.persistence.PersistenceContext;
-import javax.persistence.Query;
-import javax.persistence.StoredProcedureQuery;
-import javax.persistence.TypedQuery;
+import jakarta.ejb.EJB;
+import jakarta.ejb.Stateless;
+import jakarta.ejb.TransactionAttribute;
+import jakarta.ejb.TransactionAttributeType;
+import jakarta.faces.model.SelectItem;
+import jakarta.inject.Named;
+import jakarta.persistence.EntityManager;
+import jakarta.persistence.PersistenceContext;
+import jakarta.persistence.Query;
+import jakarta.persistence.StoredProcedureQuery;
+import jakarta.persistence.TypedQuery;
 import org.apache.commons.text.StringEscapeUtils;
 /**
  *
diff --git a/src/main/java/edu/harvard/iq/dataverse/GuestbookResponsesPage.java b/src/main/java/edu/harvard/iq/dataverse/GuestbookResponsesPage.java
index 23aac4a24a3..c53df93def8 100644
--- a/src/main/java/edu/harvard/iq/dataverse/GuestbookResponsesPage.java
+++ b/src/main/java/edu/harvard/iq/dataverse/GuestbookResponsesPage.java
@@ -6,20 +6,19 @@
 package edu.harvard.iq.dataverse;
 
 import edu.harvard.iq.dataverse.engine.command.impl.UpdateDataverseCommand;
-import static edu.harvard.iq.dataverse.util.JsfHelper.JH;
 
 import edu.harvard.iq.dataverse.util.BundleUtil;
 import edu.harvard.iq.dataverse.util.SystemConfig;
 import java.util.List;
 import java.util.logging.Logger;
-import javax.ejb.EJB;
-import javax.faces.application.FacesMessage;
-import javax.faces.context.FacesContext;
-import javax.faces.view.ViewScoped;
-import javax.inject.Inject;
-import javax.inject.Named;
-import javax.servlet.ServletOutputStream;
-import javax.servlet.http.HttpServletResponse;
+import jakarta.ejb.EJB;
+import jakarta.faces.application.FacesMessage;
+import jakarta.faces.context.FacesContext;
+import jakarta.faces.view.ViewScoped;
+import jakarta.inject.Inject;
+import jakarta.inject.Named;
+import jakarta.servlet.ServletOutputStream;
+import jakarta.servlet.http.HttpServletResponse;
 
 /**
  *
diff --git a/src/main/java/edu/harvard/iq/dataverse/GuestbookServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/GuestbookServiceBean.java
index 5394ddc652a..fcd4e91d455 100644
--- a/src/main/java/edu/harvard/iq/dataverse/GuestbookServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/GuestbookServiceBean.java
@@ -5,12 +5,11 @@
  */
 package edu.harvard.iq.dataverse;
 
-import java.util.List;
-import javax.ejb.Stateless;
-import javax.inject.Named;
-import javax.persistence.EntityManager;
-import javax.persistence.PersistenceContext;
-import javax.persistence.Query;
+import jakarta.ejb.Stateless;
+import jakarta.inject.Named;
+import jakarta.persistence.EntityManager;
+import jakarta.persistence.PersistenceContext;
+import jakarta.persistence.Query;
 
 /**
  *
diff --git a/src/main/java/edu/harvard/iq/dataverse/HandlenetServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/HandlenetServiceBean.java
index d2149a3072a..4942db9e7ec 100644
--- a/src/main/java/edu/harvard/iq/dataverse/HandlenetServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/HandlenetServiceBean.java
@@ -29,8 +29,8 @@
 import java.util.*;
 import java.util.logging.Level;
 import java.util.logging.Logger;
-import javax.ejb.EJB;
-import javax.ejb.Stateless;
+import jakarta.ejb.EJB;
+import jakarta.ejb.Stateless;
 import java.security.PrivateKey;
 
 /* Handlenet imports: */
diff --git a/src/main/java/edu/harvard/iq/dataverse/HarvestingClientsPage.java b/src/main/java/edu/harvard/iq/dataverse/HarvestingClientsPage.java
index 5be7578f7f8..f008db1403f 100644
--- a/src/main/java/edu/harvard/iq/dataverse/HarvestingClientsPage.java
+++ b/src/main/java/edu/harvard/iq/dataverse/HarvestingClientsPage.java
@@ -27,17 +27,17 @@
 import java.util.logging.Level;
 import java.util.logging.Logger;
 import java.util.regex.Pattern;
-import javax.ejb.EJB;
-import javax.faces.application.FacesMessage;
-import javax.faces.component.UIComponent;
-import javax.faces.component.UIInput;
-import javax.faces.context.FacesContext;
-import javax.faces.event.ActionEvent;
-import javax.faces.model.SelectItem;
-import javax.faces.view.ViewScoped;
-import javax.inject.Inject;
-import javax.inject.Named;
-import javax.servlet.http.HttpServletRequest;
+import jakarta.ejb.EJB;
+import jakarta.faces.application.FacesMessage;
+import jakarta.faces.component.UIComponent;
+import jakarta.faces.component.UIInput;
+import jakarta.faces.context.FacesContext;
+import jakarta.faces.event.ActionEvent;
+import jakarta.faces.model.SelectItem;
+import jakarta.faces.view.ViewScoped;
+import jakarta.inject.Inject;
+import jakarta.inject.Named;
+import jakarta.servlet.http.HttpServletRequest;
 import org.apache.commons.lang3.StringUtils;
 
 /**
diff --git a/src/main/java/edu/harvard/iq/dataverse/HarvestingDataverseConfig.java b/src/main/java/edu/harvard/iq/dataverse/HarvestingDataverseConfig.java
index 28df6e19e65..6709b978c47 100644
--- a/src/main/java/edu/harvard/iq/dataverse/HarvestingDataverseConfig.java
+++ b/src/main/java/edu/harvard/iq/dataverse/HarvestingDataverseConfig.java
@@ -6,16 +6,16 @@
 package edu.harvard.iq.dataverse;
 
 import java.io.Serializable;
-import javax.persistence.CascadeType;
-import javax.persistence.Column;
-import javax.persistence.Entity;
-import javax.persistence.GeneratedValue;
-import javax.persistence.GenerationType;
-import javax.persistence.Id;
-import javax.persistence.Index;
-import javax.persistence.JoinColumn;
-import javax.persistence.OneToOne;
-import javax.persistence.Table;
+import jakarta.persistence.CascadeType;
+import jakarta.persistence.Column;
+import jakarta.persistence.Entity;
+import jakarta.persistence.GeneratedValue;
+import jakarta.persistence.GenerationType;
+import jakarta.persistence.Id;
+import jakarta.persistence.Index;
+import jakarta.persistence.JoinColumn;
+import jakarta.persistence.OneToOne;
+import jakarta.persistence.Table;
 
 /**
  *
diff --git a/src/main/java/edu/harvard/iq/dataverse/HarvestingSetsPage.java b/src/main/java/edu/harvard/iq/dataverse/HarvestingSetsPage.java
index 432683a5797..6dbba34920b 100644
--- a/src/main/java/edu/harvard/iq/dataverse/HarvestingSetsPage.java
+++ b/src/main/java/edu/harvard/iq/dataverse/HarvestingSetsPage.java
@@ -6,11 +6,6 @@
 package edu.harvard.iq.dataverse;
 
 import edu.harvard.iq.dataverse.authorization.AuthenticationServiceBean;
-import edu.harvard.iq.dataverse.engine.command.exception.CommandException;
-import edu.harvard.iq.dataverse.engine.command.impl.CreateHarvestingClientCommand;
-import edu.harvard.iq.dataverse.engine.command.impl.UpdateHarvestingClientCommand;
-import edu.harvard.iq.dataverse.harvest.client.HarvestingClient;
-import edu.harvard.iq.dataverse.harvest.client.HarvestingClientServiceBean;
 import edu.harvard.iq.dataverse.harvest.server.OAIRecord;
 import edu.harvard.iq.dataverse.harvest.server.OAIRecordServiceBean;
 import edu.harvard.iq.dataverse.harvest.server.OAISet;
@@ -26,15 +21,15 @@
 import java.util.logging.Level;
 import java.util.logging.Logger;
 import java.util.regex.Pattern;
-import javax.ejb.EJB;
-import javax.faces.application.FacesMessage;
-import javax.faces.component.UIComponent;
-import javax.faces.component.UIInput;
-import javax.faces.context.FacesContext;
-import javax.faces.event.ActionEvent;
-import javax.faces.view.ViewScoped;
-import javax.inject.Inject;
-import javax.inject.Named;
+import jakarta.ejb.EJB;
+import jakarta.faces.application.FacesMessage;
+import jakarta.faces.component.UIComponent;
+import jakarta.faces.component.UIInput;
+import jakarta.faces.context.FacesContext;
+import jakarta.faces.event.ActionEvent;
+import jakarta.faces.view.ViewScoped;
+import jakarta.inject.Inject;
+import jakarta.inject.Named;
 import org.apache.commons.lang3.StringUtils;
 
 /**
diff --git a/src/main/java/edu/harvard/iq/dataverse/HomepageServlet.java b/src/main/java/edu/harvard/iq/dataverse/HomepageServlet.java
index ef9b3267db4..e1864194436 100644
--- a/src/main/java/edu/harvard/iq/dataverse/HomepageServlet.java
+++ b/src/main/java/edu/harvard/iq/dataverse/HomepageServlet.java
@@ -7,12 +7,12 @@
 
 import edu.harvard.iq.dataverse.settings.SettingsServiceBean;
 import java.io.IOException;
-import javax.ejb.EJB;
-import javax.servlet.RequestDispatcher;
-import javax.servlet.ServletException;
-import javax.servlet.http.HttpServlet;
-import javax.servlet.http.HttpServletRequest;
-import javax.servlet.http.HttpServletResponse;
+import jakarta.ejb.EJB;
+import jakarta.servlet.RequestDispatcher;
+import jakarta.servlet.ServletException;
+import jakarta.servlet.http.HttpServlet;
+import jakarta.servlet.http.HttpServletRequest;
+import jakarta.servlet.http.HttpServletResponse;
 
 /**
  *
diff --git a/src/main/java/edu/harvard/iq/dataverse/LinkValidator.java b/src/main/java/edu/harvard/iq/dataverse/LinkValidator.java
index 2ecfc55f67e..7d540f0a425 100644
--- a/src/main/java/edu/harvard/iq/dataverse/LinkValidator.java
+++ b/src/main/java/edu/harvard/iq/dataverse/LinkValidator.java
@@ -5,13 +5,13 @@
  */
 package edu.harvard.iq.dataverse;
 
-import javax.faces.application.FacesMessage;
-import javax.faces.component.UIComponent;
-import javax.faces.component.UIInput;
-import javax.faces.context.FacesContext;
-import javax.faces.validator.FacesValidator;
-import javax.faces.validator.Validator;
-import javax.faces.validator.ValidatorException;
+import jakarta.faces.application.FacesMessage;
+import jakarta.faces.component.UIComponent;
+import jakarta.faces.component.UIInput;
+import jakarta.faces.context.FacesContext;
+import jakarta.faces.validator.FacesValidator;
+import jakarta.faces.validator.Validator;
+import jakarta.faces.validator.ValidatorException;
 import edu.harvard.iq.dataverse.util.BundleUtil;
 
 @FacesValidator(value = "linkValidator")
diff --git a/src/main/java/edu/harvard/iq/dataverse/LoginPage.java b/src/main/java/edu/harvard/iq/dataverse/LoginPage.java
index 2420ce08550..16d2cc53cb9 100644
--- a/src/main/java/edu/harvard/iq/dataverse/LoginPage.java
+++ b/src/main/java/edu/harvard/iq/dataverse/LoginPage.java
@@ -9,30 +9,27 @@
 import edu.harvard.iq.dataverse.authorization.exceptions.AuthenticationFailedException;
 import edu.harvard.iq.dataverse.authorization.providers.builtin.BuiltinAuthenticationProvider;
 import edu.harvard.iq.dataverse.authorization.providers.builtin.BuiltinUserServiceBean;
-import edu.harvard.iq.dataverse.authorization.providers.shib.ShibAuthenticationProvider;
 import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser;
 import edu.harvard.iq.dataverse.settings.SettingsServiceBean;
 import edu.harvard.iq.dataverse.util.BundleUtil;
 import edu.harvard.iq.dataverse.util.JsfHelper;
-import edu.harvard.iq.dataverse.util.SessionUtil;
 
-import static edu.harvard.iq.dataverse.util.JsfHelper.JH;
 import edu.harvard.iq.dataverse.util.SystemConfig;
 import java.io.UnsupportedEncodingException;
 import java.net.URLDecoder;
 import java.util.*;
 import java.util.logging.Level;
 import java.util.logging.Logger;
-import javax.ejb.EJB;
-import javax.faces.application.FacesMessage;
-import javax.faces.component.UIComponent;
-import javax.faces.context.FacesContext;
-import javax.faces.event.AjaxBehaviorEvent;
-import javax.faces.validator.ValidatorException;
-import javax.faces.view.ViewScoped;
-import javax.inject.Inject;
-import javax.inject.Named;
-import javax.servlet.http.HttpServletRequest;
+import jakarta.ejb.EJB;
+import jakarta.faces.application.FacesMessage;
+import jakarta.faces.component.UIComponent;
+import jakarta.faces.context.FacesContext;
+import jakarta.faces.event.AjaxBehaviorEvent;
+import jakarta.faces.validator.ValidatorException;
+import jakarta.faces.view.ViewScoped;
+import jakarta.inject.Inject;
+import jakarta.inject.Named;
+import jakarta.servlet.http.HttpServletRequest;
 
 /**
  *
diff --git a/src/main/java/edu/harvard/iq/dataverse/MailServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/MailServiceBean.java
index bc7b34ee8b7..f17732df7b6 100644
--- a/src/main/java/edu/harvard/iq/dataverse/MailServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/MailServiceBean.java
@@ -5,7 +5,6 @@
  */
 package edu.harvard.iq.dataverse;
 
-import com.sun.mail.smtp.SMTPSendFailedException;
 import edu.harvard.iq.dataverse.authorization.groups.Group;
 import edu.harvard.iq.dataverse.authorization.groups.GroupServiceBean;
 import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser;
@@ -24,25 +23,23 @@
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Date;
-import java.util.Properties;
-import java.util.Map;
-import java.util.HashMap;
 import java.util.List;
 import java.util.Set;
 import java.util.logging.Logger;
-import javax.annotation.Resource;
-import javax.ejb.EJB;
-import javax.ejb.Stateless;
-import javax.mail.Address;
-import javax.mail.Message;
-import javax.mail.MessagingException;
-import javax.mail.Session;
-import javax.mail.Transport;
-import javax.mail.internet.AddressException;
-import javax.mail.internet.InternetAddress;
-import javax.mail.internet.MimeMessage;
+import jakarta.annotation.Resource;
+import jakarta.ejb.EJB;
+import jakarta.ejb.Stateless;
+import jakarta.mail.Address;
+import jakarta.mail.Message;
+import jakarta.mail.MessagingException;
+import jakarta.mail.Session;
+import jakarta.mail.Transport;
+import jakarta.mail.internet.AddressException;
+import jakarta.mail.internet.InternetAddress;
+import jakarta.mail.internet.MimeMessage;
 
 import edu.harvard.iq.dataverse.validation.EMailValidator;
+import jakarta.json.JsonObject;
 import org.apache.commons.lang3.StringUtils;
 
 /**
@@ -124,9 +121,9 @@ public boolean sendSystemEmail(String to, String subject, String messageText, bo
                 try {
                     Transport.send(msg, recipients);
                     sent = true;
-                } catch (SMTPSendFailedException ssfe) {
+                } catch (MessagingException ssfe) {
                     logger.warning("Failed to send mail to: " + to);
-                    logger.warning("SMTPSendFailedException Message: " + ssfe);
+                    logger.warning("MessagingException Message: " + ssfe);
                 }
             } else {
                 logger.fine("Skipping sending mail to " + to + ", because the \"no-reply\" address not set (" + Key.SystemEmail + " setting).");
@@ -616,7 +613,7 @@ public String getMessageTextBasedOnNotification(UserNotification userNotificatio
             case DATASETMENTIONED:
                 String additionalInfo = userNotification.getAdditionalInfo();
                 dataset = (Dataset) targetObject;
-                javax.json.JsonObject citingResource = null;
+                JsonObject citingResource = null;
                 citingResource = JsonUtil.getJsonObject(additionalInfo);
                 
 
diff --git a/src/main/java/edu/harvard/iq/dataverse/ManageFilePermissionsPage.java b/src/main/java/edu/harvard/iq/dataverse/ManageFilePermissionsPage.java
index fd309790026..1b4af29c915 100644
--- a/src/main/java/edu/harvard/iq/dataverse/ManageFilePermissionsPage.java
+++ b/src/main/java/edu/harvard/iq/dataverse/ManageFilePermissionsPage.java
@@ -12,7 +12,6 @@
 import edu.harvard.iq.dataverse.authorization.Permission;
 import edu.harvard.iq.dataverse.authorization.RoleAssignee;
 import edu.harvard.iq.dataverse.authorization.RoleAssigneeDisplayInfo;
-import edu.harvard.iq.dataverse.authorization.groups.Group;
 import edu.harvard.iq.dataverse.authorization.groups.GroupServiceBean;
 import edu.harvard.iq.dataverse.authorization.groups.impl.explicit.ExplicitGroupServiceBean;
 import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser;
@@ -28,14 +27,14 @@
 import java.util.*;
 import java.util.logging.Level;
 import java.util.logging.Logger;
-import javax.ejb.EJB;
-import javax.faces.application.FacesMessage;
-import javax.faces.event.ActionEvent;
-import javax.faces.view.ViewScoped;
-import javax.inject.Inject;
-import javax.inject.Named;
-import javax.persistence.EntityManager;
-import javax.persistence.PersistenceContext;
+import jakarta.ejb.EJB;
+import jakarta.faces.application.FacesMessage;
+import jakarta.faces.event.ActionEvent;
+import jakarta.faces.view.ViewScoped;
+import jakarta.inject.Inject;
+import jakarta.inject.Named;
+import jakarta.persistence.EntityManager;
+import jakarta.persistence.PersistenceContext;
 import org.apache.commons.lang3.ObjectUtils;
 
 /**
diff --git a/src/main/java/edu/harvard/iq/dataverse/ManageGroupsPage.java b/src/main/java/edu/harvard/iq/dataverse/ManageGroupsPage.java
index 8513ca33b47..583e195ab0d 100644
--- a/src/main/java/edu/harvard/iq/dataverse/ManageGroupsPage.java
+++ b/src/main/java/edu/harvard/iq/dataverse/ManageGroupsPage.java
@@ -22,17 +22,17 @@
 import java.util.logging.Level;
 import java.util.logging.Logger;
 import java.util.regex.Pattern;
-import javax.ejb.EJB;
-import javax.faces.application.FacesMessage;
-import javax.faces.component.UIComponent;
-import javax.faces.component.UIInput;
-import javax.faces.context.FacesContext;
-import javax.faces.event.ActionEvent;
-import javax.faces.view.ViewScoped;
-import javax.inject.Inject;
-import javax.inject.Named;
-import javax.persistence.EntityManager;
-import javax.persistence.PersistenceContext;
+import jakarta.ejb.EJB;
+import jakarta.faces.application.FacesMessage;
+import jakarta.faces.component.UIComponent;
+import jakarta.faces.component.UIInput;
+import jakarta.faces.context.FacesContext;
+import jakarta.faces.event.ActionEvent;
+import jakarta.faces.view.ViewScoped;
+import jakarta.inject.Inject;
+import jakarta.inject.Named;
+import jakarta.persistence.EntityManager;
+import jakarta.persistence.PersistenceContext;
 import org.apache.commons.lang3.StringUtils;
 
 
diff --git a/src/main/java/edu/harvard/iq/dataverse/ManageGuestbooksPage.java b/src/main/java/edu/harvard/iq/dataverse/ManageGuestbooksPage.java
index 7db0ecc0767..cc89cfd9d56 100644
--- a/src/main/java/edu/harvard/iq/dataverse/ManageGuestbooksPage.java
+++ b/src/main/java/edu/harvard/iq/dataverse/ManageGuestbooksPage.java
@@ -11,17 +11,19 @@
 import java.util.List;
 import java.util.logging.Level;
 import java.util.logging.Logger;
-import javax.ejb.EJB;
-import javax.faces.application.FacesMessage;
-import javax.faces.context.FacesContext;
-import javax.faces.event.ActionEvent;
-import javax.faces.view.ViewScoped;
-import javax.inject.Inject;
-import javax.inject.Named;
-import javax.persistence.EntityManager;
-import javax.persistence.PersistenceContext;
-import javax.servlet.ServletOutputStream;
-import javax.servlet.http.HttpServletResponse;
+import jakarta.ejb.EJB;
+import jakarta.faces.application.FacesMessage;
+import jakarta.faces.context.FacesContext;
+import jakarta.faces.event.AbortProcessingException;
+import jakarta.faces.event.ActionEvent;
+import jakarta.faces.event.AjaxBehaviorEvent;
+import jakarta.faces.view.ViewScoped;
+import jakarta.inject.Inject;
+import jakarta.inject.Named;
+import jakarta.persistence.EntityManager;
+import jakarta.persistence.PersistenceContext;
+import jakarta.servlet.ServletOutputStream;
+import jakarta.servlet.http.HttpServletResponse;
 
 /**
  *
@@ -325,7 +327,7 @@ public void setDisplayDownloadAll(boolean displayDownloadAll) {
         this.displayDownloadAll = displayDownloadAll;
     }
 
-    public String updateGuestbooksRoot(javax.faces.event.AjaxBehaviorEvent event) throws javax.faces.event.AbortProcessingException {
+    public String updateGuestbooksRoot(AjaxBehaviorEvent event) throws AbortProcessingException {
         try {
             dataverse = engineService.submit(
                     new UpdateDataverseGuestbookRootCommand(!isInheritGuestbooksValue(),
diff --git a/src/main/java/edu/harvard/iq/dataverse/ManagePermissionsPage.java b/src/main/java/edu/harvard/iq/dataverse/ManagePermissionsPage.java
index e71e04bc42f..bf78b9d088f 100644
--- a/src/main/java/edu/harvard/iq/dataverse/ManagePermissionsPage.java
+++ b/src/main/java/edu/harvard/iq/dataverse/ManagePermissionsPage.java
@@ -29,18 +29,17 @@
 import java.util.Date;
 import java.util.LinkedList;
 import java.util.List;
-import java.util.ResourceBundle;
 import java.util.Set;
 import java.util.logging.Level;
 import java.util.logging.Logger;
-import javax.ejb.EJB;
-import javax.faces.application.FacesMessage;
-import javax.faces.event.ActionEvent;
-import javax.faces.view.ViewScoped;
-import javax.inject.Inject;
-import javax.inject.Named;
-import javax.persistence.EntityManager;
-import javax.persistence.PersistenceContext;
+import jakarta.ejb.EJB;
+import jakarta.faces.application.FacesMessage;
+import jakarta.faces.event.ActionEvent;
+import jakarta.faces.view.ViewScoped;
+import jakarta.inject.Inject;
+import jakarta.inject.Named;
+import jakarta.persistence.EntityManager;
+import jakarta.persistence.PersistenceContext;
 import org.apache.commons.text.StringEscapeUtils;
 
 /**
diff --git a/src/main/java/edu/harvard/iq/dataverse/ManageTemplatesPage.java b/src/main/java/edu/harvard/iq/dataverse/ManageTemplatesPage.java
index 37ee7948a14..98369a2eab3 100644
--- a/src/main/java/edu/harvard/iq/dataverse/ManageTemplatesPage.java
+++ b/src/main/java/edu/harvard/iq/dataverse/ManageTemplatesPage.java
@@ -14,15 +14,17 @@
 import java.util.List;
 import java.util.logging.Level;
 import java.util.logging.Logger;
-import javax.ejb.EJB;
-import javax.faces.application.FacesMessage;
-import javax.faces.event.ActionEvent;
-import javax.faces.view.ViewScoped;
-import javax.inject.Inject;
-import javax.inject.Named;
-import javax.persistence.EntityManager;
-import javax.persistence.PersistenceContext;
+import jakarta.ejb.EJB;
+import jakarta.faces.application.FacesMessage;
+import jakarta.faces.event.ActionEvent;
+import jakarta.faces.view.ViewScoped;
+import jakarta.inject.Inject;
+import jakarta.inject.Named;
+import jakarta.persistence.EntityManager;
+import jakarta.persistence.PersistenceContext;
 import edu.harvard.iq.dataverse.util.BundleUtil;
+import jakarta.faces.event.AbortProcessingException;
+import jakarta.faces.event.AjaxBehaviorEvent;
 /**
  *
  * @author skraffmiller
@@ -237,7 +239,7 @@ public void viewSelectedTemplate(Template selectedTemplate) {
         tempPage.setTemplate(selectedTemplate);
     }
 
-    public String updateTemplatesRoot(javax.faces.event.AjaxBehaviorEvent event) throws javax.faces.event.AbortProcessingException {
+    public String updateTemplatesRoot(AjaxBehaviorEvent event) throws AbortProcessingException {
         try {
             if (dataverse.getOwner() != null) {
                 if (isInheritTemplatesValue() && dataverse.getDefaultTemplate() == null && dataverse.getOwner().getDefaultTemplate() != null) {
diff --git a/src/main/java/edu/harvard/iq/dataverse/MetadataBlock.java b/src/main/java/edu/harvard/iq/dataverse/MetadataBlock.java
index 33e75efffb5..0fd7c2efbc7 100644
--- a/src/main/java/edu/harvard/iq/dataverse/MetadataBlock.java
+++ b/src/main/java/edu/harvard/iq/dataverse/MetadataBlock.java
@@ -8,21 +8,21 @@
 import java.util.List;
 import java.util.MissingResourceException;
 import java.util.Objects;
-import javax.persistence.CascadeType;
-import javax.persistence.Column;
-import javax.persistence.Entity;
-import javax.persistence.GeneratedValue;
-import javax.persistence.GenerationType;
-import javax.persistence.Id;
-import javax.persistence.Index;
-import javax.persistence.JoinColumn;
-import javax.persistence.NamedQueries;
-import javax.persistence.NamedQuery;
-import javax.persistence.OneToMany;
-import javax.persistence.OneToOne;
-import javax.persistence.OrderBy;
-import javax.persistence.Table;
-import javax.persistence.Transient;
+import jakarta.persistence.CascadeType;
+import jakarta.persistence.Column;
+import jakarta.persistence.Entity;
+import jakarta.persistence.GeneratedValue;
+import jakarta.persistence.GenerationType;
+import jakarta.persistence.Id;
+import jakarta.persistence.Index;
+import jakarta.persistence.JoinColumn;
+import jakarta.persistence.NamedQueries;
+import jakarta.persistence.NamedQuery;
+import jakarta.persistence.OneToMany;
+import jakarta.persistence.OneToOne;
+import jakarta.persistence.OrderBy;
+import jakarta.persistence.Table;
+import jakarta.persistence.Transient;
 
 /**
  *
diff --git a/src/main/java/edu/harvard/iq/dataverse/MetadataBlockConverter.java b/src/main/java/edu/harvard/iq/dataverse/MetadataBlockConverter.java
index c5bd48ae785..49c50e82efb 100644
--- a/src/main/java/edu/harvard/iq/dataverse/MetadataBlockConverter.java
+++ b/src/main/java/edu/harvard/iq/dataverse/MetadataBlockConverter.java
@@ -5,13 +5,13 @@
  */
 package edu.harvard.iq.dataverse;
 
-import javax.ejb.EJB;
-import javax.enterprise.inject.spi.CDI;
+import jakarta.ejb.EJB;
+import jakarta.enterprise.inject.spi.CDI;
 
-import javax.faces.component.UIComponent;
-import javax.faces.context.FacesContext;
-import javax.faces.convert.Converter;
-import javax.faces.convert.FacesConverter;
+import jakarta.faces.component.UIComponent;
+import jakarta.faces.context.FacesContext;
+import jakarta.faces.convert.Converter;
+import jakarta.faces.convert.FacesConverter;
 
 /**
  *
diff --git a/src/main/java/edu/harvard/iq/dataverse/MetadataBlockServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/MetadataBlockServiceBean.java
index f34637dbfaf..bb6daa264ba 100644
--- a/src/main/java/edu/harvard/iq/dataverse/MetadataBlockServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/MetadataBlockServiceBean.java
@@ -1,11 +1,11 @@
 package edu.harvard.iq.dataverse;
 
 import java.util.List;
-import javax.ejb.Stateless;
-import javax.inject.Named;
-import javax.persistence.EntityManager;
-import javax.persistence.NoResultException;
-import javax.persistence.PersistenceContext;
+import jakarta.ejb.Stateless;
+import jakarta.inject.Named;
+import jakarta.persistence.EntityManager;
+import jakarta.persistence.NoResultException;
+import jakarta.persistence.PersistenceContext;
 
 /**
  *
diff --git a/src/main/java/edu/harvard/iq/dataverse/Metric.java b/src/main/java/edu/harvard/iq/dataverse/Metric.java
index 5526604f77c..0e71ab44db4 100644
--- a/src/main/java/edu/harvard/iq/dataverse/Metric.java
+++ b/src/main/java/edu/harvard/iq/dataverse/Metric.java
@@ -5,21 +5,20 @@
  */
 package edu.harvard.iq.dataverse;
 
-import java.io.IOException;
 import java.io.Serializable;
 import java.sql.Timestamp;
 import java.util.Date;
-import javax.persistence.Column;
-import javax.persistence.Entity;
-import javax.persistence.GeneratedValue;
-import javax.persistence.GenerationType;
-import javax.persistence.Id;
-import javax.persistence.Index;
-import javax.persistence.JoinColumn;
-import javax.persistence.ManyToOne;
-import javax.persistence.Table;
-import javax.persistence.Temporal;
-import javax.persistence.TemporalType;
+import jakarta.persistence.Column;
+import jakarta.persistence.Entity;
+import jakarta.persistence.GeneratedValue;
+import jakarta.persistence.GenerationType;
+import jakarta.persistence.Id;
+import jakarta.persistence.Index;
+import jakarta.persistence.JoinColumn;
+import jakarta.persistence.ManyToOne;
+import jakarta.persistence.Table;
+import jakarta.persistence.Temporal;
+import jakarta.persistence.TemporalType;
 
 /**
  *
diff --git a/src/main/java/edu/harvard/iq/dataverse/NavigationWrapper.java b/src/main/java/edu/harvard/iq/dataverse/NavigationWrapper.java
index 37a11396f37..832d7ec19ef 100644
--- a/src/main/java/edu/harvard/iq/dataverse/NavigationWrapper.java
+++ b/src/main/java/edu/harvard/iq/dataverse/NavigationWrapper.java
@@ -14,12 +14,12 @@
 import java.util.Map;
 import java.util.logging.Level;
 import java.util.logging.Logger;
-import javax.faces.context.FacesContext;
-import javax.faces.view.ViewScoped;
-import javax.inject.Inject;
-import javax.inject.Named;
-import javax.servlet.http.HttpServletRequest;
-import javax.servlet.http.HttpServletResponse;
+import jakarta.faces.context.FacesContext;
+import jakarta.faces.view.ViewScoped;
+import jakarta.inject.Inject;
+import jakarta.inject.Named;
+import jakarta.servlet.http.HttpServletRequest;
+import jakarta.servlet.http.HttpServletResponse;
 import org.apache.commons.lang3.StringUtils;
 
 /**
@@ -96,7 +96,8 @@ private String sendError(int errorCode) {
         try {
             context.getExternalContext().responseSendError(errorCode,null);
         } catch (IOException ex) {
-            Logger.getLogger(PermissionsWrapper.class.getName()).log(Level.SEVERE, null, ex);
+            //Logger.getLogger(PermissionsWrapper.class.getName()).log(Level.SEVERE, null, ex);
+            Logger.getLogger(NavigationWrapper.class.getName()).fine("Caught exception in sendError(): "+ex.getMessage());
         }
         context.responseComplete();
         return "";
diff --git a/src/main/java/edu/harvard/iq/dataverse/PackagePopupFragmentBean.java b/src/main/java/edu/harvard/iq/dataverse/PackagePopupFragmentBean.java
index fac2abeddb8..0c5218fb927 100644
--- a/src/main/java/edu/harvard/iq/dataverse/PackagePopupFragmentBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/PackagePopupFragmentBean.java
@@ -6,8 +6,8 @@
 
 package edu.harvard.iq.dataverse;
 
-import javax.faces.view.ViewScoped;
-import javax.inject.Named;
+import jakarta.faces.view.ViewScoped;
+import jakarta.inject.Named;
 
 /**
  *
diff --git a/src/main/java/edu/harvard/iq/dataverse/PermissionServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/PermissionServiceBean.java
index 8c0a0bf90b0..a1de33a764e 100644
--- a/src/main/java/edu/harvard/iq/dataverse/PermissionServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/PermissionServiceBean.java
@@ -1,6 +1,5 @@
 package edu.harvard.iq.dataverse;
 
-import edu.harvard.iq.dataverse.DatasetLock.Reason;
 import edu.harvard.iq.dataverse.authorization.AuthenticationServiceBean;
 import edu.harvard.iq.dataverse.authorization.DataverseRole;
 import edu.harvard.iq.dataverse.authorization.providers.builtin.BuiltinUserServiceBean;
@@ -17,14 +16,14 @@
 import java.util.Map;
 import java.util.Set;
 import java.util.logging.Logger;
-import javax.ejb.EJB;
-import javax.ejb.Stateless;
-import javax.inject.Inject;
-import javax.inject.Named;
+import jakarta.ejb.EJB;
+import jakarta.ejb.Stateless;
+import jakarta.inject.Inject;
+import jakarta.inject.Named;
 import java.util.HashSet;
 import java.util.List;
-import javax.persistence.EntityManager;
-import javax.persistence.PersistenceContext;
+import jakarta.persistence.EntityManager;
+import jakarta.persistence.PersistenceContext;
 import static edu.harvard.iq.dataverse.engine.command.CommandHelper.CH;
 import edu.harvard.iq.dataverse.engine.command.DataverseRequest;
 import edu.harvard.iq.dataverse.engine.command.exception.IllegalCommandException;
@@ -41,7 +40,7 @@
 import java.util.logging.Level;
 import java.util.stream.Collectors;
 import static java.util.stream.Collectors.toList;
-import javax.persistence.Query;
+import jakarta.persistence.Query;
 
 /**
  * Your one-stop-shop for deciding which user can do what action on which
diff --git a/src/main/java/edu/harvard/iq/dataverse/PermissionsWrapper.java b/src/main/java/edu/harvard/iq/dataverse/PermissionsWrapper.java
index 4ee45fc85a1..5ce9edb3a9e 100644
--- a/src/main/java/edu/harvard/iq/dataverse/PermissionsWrapper.java
+++ b/src/main/java/edu/harvard/iq/dataverse/PermissionsWrapper.java
@@ -14,10 +14,10 @@
 import java.util.HashMap;
 import java.util.Map;
 import java.util.logging.Logger;
-import javax.ejb.EJB;
-import javax.faces.view.ViewScoped;
-import javax.inject.Inject;
-import javax.inject.Named;
+import jakarta.ejb.EJB;
+import jakarta.faces.view.ViewScoped;
+import jakarta.inject.Inject;
+import jakarta.inject.Named;
 
 /**
  *
diff --git a/src/main/java/edu/harvard/iq/dataverse/RoleAssigneeConverter.java b/src/main/java/edu/harvard/iq/dataverse/RoleAssigneeConverter.java
index a5e4cebbd95..0d863f47324 100644
--- a/src/main/java/edu/harvard/iq/dataverse/RoleAssigneeConverter.java
+++ b/src/main/java/edu/harvard/iq/dataverse/RoleAssigneeConverter.java
@@ -7,13 +7,13 @@
 package edu.harvard.iq.dataverse;
 
 import edu.harvard.iq.dataverse.authorization.RoleAssignee;
-import javax.ejb.EJB;
-import javax.enterprise.inject.spi.CDI;
+import jakarta.ejb.EJB;
+import jakarta.enterprise.inject.spi.CDI;
 
-import javax.faces.component.UIComponent;
-import javax.faces.context.FacesContext;
-import javax.faces.convert.Converter;
-import javax.faces.convert.FacesConverter;
+import jakarta.faces.component.UIComponent;
+import jakarta.faces.context.FacesContext;
+import jakarta.faces.convert.Converter;
+import jakarta.faces.convert.FacesConverter;
 
 /**
  *
diff --git a/src/main/java/edu/harvard/iq/dataverse/RoleAssigneeServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/RoleAssigneeServiceBean.java
index c6f2b7f28a5..059d5a8ffd3 100644
--- a/src/main/java/edu/harvard/iq/dataverse/RoleAssigneeServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/RoleAssigneeServiceBean.java
@@ -21,12 +21,12 @@
 import java.util.TreeMap;
 import java.util.logging.Logger;
 import java.util.stream.Collectors;
-import javax.annotation.PostConstruct;
-import javax.ejb.EJB;
-import javax.ejb.Stateless;
-import javax.inject.Named;
-import javax.persistence.EntityManager;
-import javax.persistence.PersistenceContext;
+import jakarta.annotation.PostConstruct;
+import jakarta.ejb.EJB;
+import jakarta.ejb.Stateless;
+import jakarta.inject.Named;
+import jakarta.persistence.EntityManager;
+import jakarta.persistence.PersistenceContext;
 import org.apache.commons.lang3.StringUtils;
 
 /**
diff --git a/src/main/java/edu/harvard/iq/dataverse/RoleAssignment.java b/src/main/java/edu/harvard/iq/dataverse/RoleAssignment.java
index f053a449da4..df004fe1357 100644
--- a/src/main/java/edu/harvard/iq/dataverse/RoleAssignment.java
+++ b/src/main/java/edu/harvard/iq/dataverse/RoleAssignment.java
@@ -3,19 +3,19 @@
 import edu.harvard.iq.dataverse.authorization.DataverseRole;
 import edu.harvard.iq.dataverse.authorization.RoleAssignee;
 import java.util.Objects;
-import javax.persistence.CascadeType;
-import javax.persistence.Column;
-import javax.persistence.Entity;
-import javax.persistence.GeneratedValue;
-import javax.persistence.GenerationType;
-import javax.persistence.Id;
-import javax.persistence.Index;
-import javax.persistence.JoinColumn;
-import javax.persistence.ManyToOne;
-import javax.persistence.NamedQueries;
-import javax.persistence.NamedQuery;
-import javax.persistence.Table;
-import javax.persistence.UniqueConstraint;
+import jakarta.persistence.CascadeType;
+import jakarta.persistence.Column;
+import jakarta.persistence.Entity;
+import jakarta.persistence.GeneratedValue;
+import jakarta.persistence.GenerationType;
+import jakarta.persistence.Id;
+import jakarta.persistence.Index;
+import jakarta.persistence.JoinColumn;
+import jakarta.persistence.ManyToOne;
+import jakarta.persistence.NamedQueries;
+import jakarta.persistence.NamedQuery;
+import jakarta.persistence.Table;
+import jakarta.persistence.UniqueConstraint;
 
 /**
  * A role of a user in a Dataverse. A User may have many roles in a given Dataverse.
diff --git a/src/main/java/edu/harvard/iq/dataverse/RolePermissionFragment.java b/src/main/java/edu/harvard/iq/dataverse/RolePermissionFragment.java
index dd3044d3749..1bd337452c2 100644
--- a/src/main/java/edu/harvard/iq/dataverse/RolePermissionFragment.java
+++ b/src/main/java/edu/harvard/iq/dataverse/RolePermissionFragment.java
@@ -26,16 +26,18 @@
 import java.util.Set;
 import java.util.logging.Level;
 import java.util.logging.Logger;
-import javax.ejb.EJB;
-import javax.faces.application.FacesMessage;
-import javax.faces.event.ActionEvent;
-import javax.faces.view.ViewScoped;
-import javax.inject.Inject;
-import javax.inject.Named;
-import javax.persistence.EntityManager;
-import javax.persistence.PersistenceContext;
+import jakarta.ejb.EJB;
+import jakarta.faces.application.FacesMessage;
+import jakarta.faces.event.ActionEvent;
+import jakarta.faces.view.ViewScoped;
+import jakarta.inject.Inject;
+import jakarta.inject.Named;
+import jakarta.persistence.EntityManager;
+import jakarta.persistence.PersistenceContext;
 
 import edu.harvard.iq.dataverse.util.BundleUtil;
+import jakarta.faces.event.AbortProcessingException;
+import jakarta.faces.event.AjaxBehaviorEvent;
 import org.apache.commons.text.StringEscapeUtils;
 import org.apache.commons.lang3.StringUtils;
 
@@ -92,7 +94,7 @@ public void setInheritAssignments(boolean inheritAssignments) {
         this.inheritAssignments = inheritAssignments;
     }
 
-    public void updatePermissionRoot(javax.faces.event.AjaxBehaviorEvent event) throws javax.faces.event.AbortProcessingException {
+    public void updatePermissionRoot(AjaxBehaviorEvent event) throws AbortProcessingException {
         try {
             dvObject = commandEngine.submit(
                     new UpdatePermissionRootCommand(!inheritAssignments, 
diff --git a/src/main/java/edu/harvard/iq/dataverse/S3PackageImporter.java b/src/main/java/edu/harvard/iq/dataverse/S3PackageImporter.java
index a099f5f3939..71318a0184a 100644
--- a/src/main/java/edu/harvard/iq/dataverse/S3PackageImporter.java
+++ b/src/main/java/edu/harvard/iq/dataverse/S3PackageImporter.java
@@ -17,9 +17,6 @@
 import com.amazonaws.services.s3.model.S3Object;
 import com.amazonaws.services.s3.model.S3ObjectSummary;
 import edu.harvard.iq.dataverse.api.AbstractApiBean;
-import edu.harvard.iq.dataverse.batch.jobs.importer.filesystem.FileRecordWriter;
-import edu.harvard.iq.dataverse.engine.command.DataverseRequest;
-import edu.harvard.iq.dataverse.engine.command.exception.IllegalCommandException;
 import edu.harvard.iq.dataverse.settings.SettingsServiceBean;
 import edu.harvard.iq.dataverse.util.FileUtil;
 import java.io.BufferedReader;
@@ -31,9 +28,9 @@
 import java.util.List;
 import java.util.logging.Level;
 import java.util.logging.Logger;
-import javax.ejb.EJB;
-import javax.ejb.Stateless;
-import javax.inject.Named;
+import jakarta.ejb.EJB;
+import jakarta.ejb.Stateless;
+import jakarta.inject.Named;
 
 /**
  * This class is for importing files added to s3 outside of dataverse.
diff --git a/src/main/java/edu/harvard/iq/dataverse/SendFeedbackDialog.java b/src/main/java/edu/harvard/iq/dataverse/SendFeedbackDialog.java
index 13a7cc51357..6be768321c4 100644
--- a/src/main/java/edu/harvard/iq/dataverse/SendFeedbackDialog.java
+++ b/src/main/java/edu/harvard/iq/dataverse/SendFeedbackDialog.java
@@ -11,16 +11,16 @@
 import java.util.Optional;
 import java.util.Random;
 import java.util.logging.Logger;
-import javax.ejb.EJB;
-import javax.faces.application.FacesMessage;
-import javax.faces.component.UIComponent;
-import javax.faces.context.FacesContext;
-import javax.faces.event.ActionEvent;
-import javax.faces.validator.ValidatorException;
-import javax.faces.view.ViewScoped;
-import javax.inject.Inject;
-import javax.inject.Named;
-import javax.mail.internet.InternetAddress;
+import jakarta.ejb.EJB;
+import jakarta.faces.application.FacesMessage;
+import jakarta.faces.component.UIComponent;
+import jakarta.faces.context.FacesContext;
+import jakarta.faces.event.ActionEvent;
+import jakarta.faces.validator.ValidatorException;
+import jakarta.faces.view.ViewScoped;
+import jakarta.inject.Inject;
+import jakarta.inject.Named;
+import jakarta.mail.internet.InternetAddress;
 import org.apache.commons.validator.routines.EmailValidator;
 
 @ViewScoped
diff --git a/src/main/java/edu/harvard/iq/dataverse/SettingsWrapper.java b/src/main/java/edu/harvard/iq/dataverse/SettingsWrapper.java
index 14b429f3219..307301049f0 100644
--- a/src/main/java/edu/harvard/iq/dataverse/SettingsWrapper.java
+++ b/src/main/java/edu/harvard/iq/dataverse/SettingsWrapper.java
@@ -27,17 +27,16 @@
 import java.util.logging.Logger;
 import java.util.Set;
 
-import javax.ejb.EJB;
-import javax.faces.application.FacesMessage;
-import javax.faces.component.UIComponent;
-import javax.faces.component.UIInput;
-import javax.faces.context.FacesContext;
-import javax.faces.validator.ValidatorException;
-import javax.faces.view.ViewScoped;
-import javax.inject.Named;
-import javax.json.Json;
-import javax.json.JsonObject;
-import javax.mail.internet.InternetAddress;
+import jakarta.ejb.EJB;
+import jakarta.faces.application.FacesMessage;
+import jakarta.faces.component.UIComponent;
+import jakarta.faces.component.UIInput;
+import jakarta.faces.context.FacesContext;
+import jakarta.faces.validator.ValidatorException;
+import jakarta.faces.view.ViewScoped;
+import jakarta.inject.Named;
+import jakarta.json.JsonObject;
+import jakarta.mail.internet.InternetAddress;
 
 /**
  *
diff --git a/src/main/java/edu/harvard/iq/dataverse/Shib.java b/src/main/java/edu/harvard/iq/dataverse/Shib.java
index 0f0e20aba94..bee1182e248 100644
--- a/src/main/java/edu/harvard/iq/dataverse/Shib.java
+++ b/src/main/java/edu/harvard/iq/dataverse/Shib.java
@@ -25,15 +25,15 @@
 import java.util.Date;
 import java.util.List;
 import java.util.logging.Logger;
-import javax.ejb.EJB;
-import javax.ejb.EJBException;
-import javax.faces.application.FacesMessage;
-import javax.faces.context.ExternalContext;
-import javax.faces.context.FacesContext;
-import javax.faces.view.ViewScoped;
-import javax.inject.Inject;
-import javax.inject.Named;
-import javax.servlet.http.HttpServletRequest;
+import jakarta.ejb.EJB;
+import jakarta.ejb.EJBException;
+import jakarta.faces.application.FacesMessage;
+import jakarta.faces.context.ExternalContext;
+import jakarta.faces.context.FacesContext;
+import jakarta.faces.view.ViewScoped;
+import jakarta.inject.Inject;
+import jakarta.inject.Named;
+import jakarta.servlet.http.HttpServletRequest;
 
 @ViewScoped
 @Named("Shib")
diff --git a/src/main/java/edu/harvard/iq/dataverse/SuperUserPage.java b/src/main/java/edu/harvard/iq/dataverse/SuperUserPage.java
index cd6d53fd8a8..adf2e7d3010 100644
--- a/src/main/java/edu/harvard/iq/dataverse/SuperUserPage.java
+++ b/src/main/java/edu/harvard/iq/dataverse/SuperUserPage.java
@@ -6,11 +6,11 @@
 import java.util.concurrent.CancellationException;
 import java.util.concurrent.ExecutionException;
 import java.util.concurrent.Future;
-import javax.ejb.EJB;
-import javax.enterprise.context.SessionScoped;
-import javax.inject.Inject;
-import javax.inject.Named;
-import javax.json.JsonObjectBuilder;
+import jakarta.ejb.EJB;
+import jakarta.enterprise.context.SessionScoped;
+import jakarta.inject.Inject;
+import jakarta.inject.Named;
+import jakarta.json.JsonObjectBuilder;
 
 @SessionScoped
 @Named("SuperUserPage")
diff --git a/src/main/java/edu/harvard/iq/dataverse/Template.java b/src/main/java/edu/harvard/iq/dataverse/Template.java
index cc1190d36d9..05c6df51197 100644
--- a/src/main/java/edu/harvard/iq/dataverse/Template.java
+++ b/src/main/java/edu/harvard/iq/dataverse/Template.java
@@ -12,31 +12,31 @@
 import java.util.TreeMap;
 import java.util.stream.Collectors;
 
-import javax.json.Json;
-import javax.json.JsonObjectBuilder;
-import javax.json.JsonString;
-import javax.persistence.CascadeType;
-import javax.persistence.Column;
-import javax.persistence.Entity;
-import javax.persistence.GeneratedValue;
-import javax.persistence.GenerationType;
-import javax.persistence.Id;
-import javax.persistence.Index;
-import javax.persistence.JoinColumn;
-import javax.persistence.ManyToOne;
-import javax.persistence.OneToMany;
-import javax.persistence.OneToOne;
-import javax.persistence.Table;
-import javax.persistence.Temporal;
-import javax.persistence.TemporalType;
-import javax.persistence.Transient;
-import javax.validation.constraints.Size;
+import jakarta.json.Json;
+import jakarta.json.JsonObjectBuilder;
+import jakarta.json.JsonString;
+import jakarta.persistence.CascadeType;
+import jakarta.persistence.Column;
+import jakarta.persistence.Entity;
+import jakarta.persistence.GeneratedValue;
+import jakarta.persistence.GenerationType;
+import jakarta.persistence.Id;
+import jakarta.persistence.Index;
+import jakarta.persistence.JoinColumn;
+import jakarta.persistence.ManyToOne;
+import jakarta.persistence.OneToMany;
+import jakarta.persistence.OneToOne;
+import jakarta.persistence.Table;
+import jakarta.persistence.Temporal;
+import jakarta.persistence.TemporalType;
+import jakarta.persistence.Transient;
+import jakarta.validation.constraints.Size;
 
 import edu.harvard.iq.dataverse.util.DateUtil;
 import edu.harvard.iq.dataverse.util.json.JsonUtil;
 
-import javax.persistence.NamedQueries;
-import javax.persistence.NamedQuery;
+import jakarta.persistence.NamedQueries;
+import jakarta.persistence.NamedQuery;
 import org.hibernate.validator.constraints.NotBlank;
 
 /**
diff --git a/src/main/java/edu/harvard/iq/dataverse/TemplateConverter.java b/src/main/java/edu/harvard/iq/dataverse/TemplateConverter.java
index 98b24f84801..1d855e029ce 100644
--- a/src/main/java/edu/harvard/iq/dataverse/TemplateConverter.java
+++ b/src/main/java/edu/harvard/iq/dataverse/TemplateConverter.java
@@ -6,12 +6,12 @@
 
 package edu.harvard.iq.dataverse;
 
-import javax.ejb.EJB;
-import javax.enterprise.inject.spi.CDI;
-import javax.faces.component.UIComponent;
-import javax.faces.context.FacesContext;
-import javax.faces.convert.Converter;
-import javax.faces.convert.FacesConverter;
+import jakarta.ejb.EJB;
+import jakarta.enterprise.inject.spi.CDI;
+import jakarta.faces.component.UIComponent;
+import jakarta.faces.context.FacesContext;
+import jakarta.faces.convert.Converter;
+import jakarta.faces.convert.FacesConverter;
 
 /**
  *
diff --git a/src/main/java/edu/harvard/iq/dataverse/TemplatePage.java b/src/main/java/edu/harvard/iq/dataverse/TemplatePage.java
index 56898943467..fff520fd259 100644
--- a/src/main/java/edu/harvard/iq/dataverse/TemplatePage.java
+++ b/src/main/java/edu/harvard/iq/dataverse/TemplatePage.java
@@ -15,12 +15,12 @@
 import java.util.Date;
 import java.util.List;
 import java.util.logging.Logger;
-import javax.ejb.EJB;
-import javax.ejb.EJBException;
-import javax.faces.application.FacesMessage;
-import javax.faces.view.ViewScoped;
-import javax.inject.Inject;
-import javax.inject.Named;
+import jakarta.ejb.EJB;
+import jakarta.ejb.EJBException;
+import jakarta.faces.application.FacesMessage;
+import jakarta.faces.view.ViewScoped;
+import jakarta.inject.Inject;
+import jakarta.inject.Named;
 
 /**
  *
diff --git a/src/main/java/edu/harvard/iq/dataverse/TemplateServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/TemplateServiceBean.java
index f2ac8779d2d..46382fc2588 100644
--- a/src/main/java/edu/harvard/iq/dataverse/TemplateServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/TemplateServiceBean.java
@@ -2,14 +2,13 @@
 
 import edu.harvard.iq.dataverse.search.IndexServiceBean;
 import java.util.List;
-import java.util.logging.Level;
 import java.util.logging.Logger;
-import javax.ejb.EJB;
-import javax.ejb.Stateless;
-import javax.inject.Named;
-import javax.persistence.EntityManager;
-import javax.persistence.PersistenceContext;
-import javax.persistence.TypedQuery;
+import jakarta.ejb.EJB;
+import jakarta.ejb.Stateless;
+import jakarta.inject.Named;
+import jakarta.persistence.EntityManager;
+import jakarta.persistence.PersistenceContext;
+import jakarta.persistence.TypedQuery;
 
 /**
  *
diff --git a/src/main/java/edu/harvard/iq/dataverse/TermsOfUseAndAccess.java b/src/main/java/edu/harvard/iq/dataverse/TermsOfUseAndAccess.java
index a8616283332..ee865770dbe 100644
--- a/src/main/java/edu/harvard/iq/dataverse/TermsOfUseAndAccess.java
+++ b/src/main/java/edu/harvard/iq/dataverse/TermsOfUseAndAccess.java
@@ -6,15 +6,15 @@
 package edu.harvard.iq.dataverse;
 
 import java.io.Serializable;
-import javax.persistence.Column;
-import javax.persistence.Entity;
-import javax.persistence.GeneratedValue;
-import javax.persistence.GenerationType;
-import javax.persistence.Id;
-import javax.persistence.JoinColumn;
-import javax.persistence.ManyToOne;
-import javax.persistence.OneToOne;
-import javax.persistence.Transient;
+import jakarta.persistence.Column;
+import jakarta.persistence.Entity;
+import jakarta.persistence.GeneratedValue;
+import jakarta.persistence.GenerationType;
+import jakarta.persistence.Id;
+import jakarta.persistence.JoinColumn;
+import jakarta.persistence.ManyToOne;
+import jakarta.persistence.OneToOne;
+import jakarta.persistence.Transient;
 
 import edu.harvard.iq.dataverse.license.License;
 
diff --git a/src/main/java/edu/harvard/iq/dataverse/TermsOfUseAndAccessValidator.java b/src/main/java/edu/harvard/iq/dataverse/TermsOfUseAndAccessValidator.java
index 2cf78db0f03..ca38a305d63 100644
--- a/src/main/java/edu/harvard/iq/dataverse/TermsOfUseAndAccessValidator.java
+++ b/src/main/java/edu/harvard/iq/dataverse/TermsOfUseAndAccessValidator.java
@@ -6,8 +6,8 @@
 package edu.harvard.iq.dataverse;
 
 import edu.harvard.iq.dataverse.util.BundleUtil;
-import javax.validation.ConstraintValidator;
-import javax.validation.ConstraintValidatorContext;
+import jakarta.validation.ConstraintValidator;
+import jakarta.validation.ConstraintValidatorContext;
 
 /**
  *
diff --git a/src/main/java/edu/harvard/iq/dataverse/ThemeWidgetFragment.java b/src/main/java/edu/harvard/iq/dataverse/ThemeWidgetFragment.java
index e270d3842f6..9a62a99722a 100644
--- a/src/main/java/edu/harvard/iq/dataverse/ThemeWidgetFragment.java
+++ b/src/main/java/edu/harvard/iq/dataverse/ThemeWidgetFragment.java
@@ -18,16 +18,16 @@
 import java.nio.file.StandardCopyOption;
 import java.util.logging.Level;
 import java.util.logging.Logger;
-import javax.annotation.PreDestroy;
-import javax.ejb.EJB;
-import javax.faces.application.FacesMessage;
-import javax.faces.component.UIComponent;
-import javax.faces.component.html.HtmlInputText;
-import javax.faces.context.FacesContext;
-import javax.faces.validator.ValidatorException;
-import javax.faces.view.ViewScoped;
-import javax.inject.Inject;
-import javax.inject.Named;
+import jakarta.annotation.PreDestroy;
+import jakarta.ejb.EJB;
+import jakarta.faces.application.FacesMessage;
+import jakarta.faces.component.UIComponent;
+import jakarta.faces.component.html.HtmlInputText;
+import jakarta.faces.context.FacesContext;
+import jakarta.faces.validator.ValidatorException;
+import jakarta.faces.view.ViewScoped;
+import jakarta.inject.Inject;
+import jakarta.inject.Named;
 
 import org.apache.commons.lang3.StringUtils;
 import org.primefaces.PrimeFaces;
diff --git a/src/main/java/edu/harvard/iq/dataverse/ThumbnailServiceWrapper.java b/src/main/java/edu/harvard/iq/dataverse/ThumbnailServiceWrapper.java
index 6c8db8c124b..319ae06eefb 100644
--- a/src/main/java/edu/harvard/iq/dataverse/ThumbnailServiceWrapper.java
+++ b/src/main/java/edu/harvard/iq/dataverse/ThumbnailServiceWrapper.java
@@ -8,24 +8,21 @@
 import edu.harvard.iq.dataverse.dataaccess.DataAccess;
 import edu.harvard.iq.dataverse.dataaccess.StorageIO;
 import edu.harvard.iq.dataverse.dataaccess.ImageThumbConverter;
-import edu.harvard.iq.dataverse.dataset.DatasetUtil;
+
 import static edu.harvard.iq.dataverse.dataset.DatasetUtil.datasetLogoThumbnail;
 import edu.harvard.iq.dataverse.search.SolrSearchResult;
 import edu.harvard.iq.dataverse.util.FileUtil;
-import java.io.File;
+
 import java.io.IOException;
 import java.io.InputStream;
-import java.nio.file.Files;
-import java.nio.file.Path;
-import java.nio.file.Paths;
 import java.util.Base64;
 import java.util.HashMap;
 import java.util.Map;
-import javax.ejb.EJB;
-import javax.enterprise.context.RequestScoped;
-import javax.faces.view.ViewScoped;
-import javax.inject.Inject;
-import javax.inject.Named;
+import jakarta.ejb.EJB;
+import jakarta.enterprise.context.RequestScoped;
+import jakarta.faces.view.ViewScoped;
+import jakarta.inject.Inject;
+import jakarta.inject.Named;
 import org.apache.commons.io.IOUtils;
 
 /**
diff --git a/src/main/java/edu/harvard/iq/dataverse/UserBannerMessage.java b/src/main/java/edu/harvard/iq/dataverse/UserBannerMessage.java
index 7bd4f2d898f..888669ee615 100644
--- a/src/main/java/edu/harvard/iq/dataverse/UserBannerMessage.java
+++ b/src/main/java/edu/harvard/iq/dataverse/UserBannerMessage.java
@@ -4,15 +4,15 @@
 import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser;
 import java.io.Serializable;
 import java.util.Date;
-import javax.persistence.Column;
-import javax.persistence.Entity;
-import javax.persistence.GeneratedValue;
-import javax.persistence.GenerationType;
-import javax.persistence.Id;
-import javax.persistence.JoinColumn;
-import javax.persistence.OneToOne;
-import javax.persistence.Temporal;
-import javax.persistence.TemporalType;
+import jakarta.persistence.Column;
+import jakarta.persistence.Entity;
+import jakarta.persistence.GeneratedValue;
+import jakarta.persistence.GenerationType;
+import jakarta.persistence.Id;
+import jakarta.persistence.JoinColumn;
+import jakarta.persistence.OneToOne;
+import jakarta.persistence.Temporal;
+import jakarta.persistence.TemporalType;
 
 /**
  *
diff --git a/src/main/java/edu/harvard/iq/dataverse/UserNotification.java b/src/main/java/edu/harvard/iq/dataverse/UserNotification.java
index b68a1b9d13e..a87404b69a7 100644
--- a/src/main/java/edu/harvard/iq/dataverse/UserNotification.java
+++ b/src/main/java/edu/harvard/iq/dataverse/UserNotification.java
@@ -12,17 +12,17 @@
 import java.util.Collections;
 import java.util.HashSet;
 import java.util.stream.Collectors;
-import javax.persistence.Column;
-import javax.persistence.Entity;
-import javax.persistence.Enumerated;
-import javax.persistence.GeneratedValue;
-import javax.persistence.GenerationType;
-import javax.persistence.Id;
-import javax.persistence.Index;
-import javax.persistence.JoinColumn;
-import javax.persistence.ManyToOne;
-import javax.persistence.Table;
-import javax.persistence.Transient;
+import jakarta.persistence.Column;
+import jakarta.persistence.Entity;
+import jakarta.persistence.Enumerated;
+import jakarta.persistence.GeneratedValue;
+import jakarta.persistence.GenerationType;
+import jakarta.persistence.Id;
+import jakarta.persistence.Index;
+import jakarta.persistence.JoinColumn;
+import jakarta.persistence.ManyToOne;
+import jakarta.persistence.Table;
+import jakarta.persistence.Transient;
 
 /**
  *
diff --git a/src/main/java/edu/harvard/iq/dataverse/UserNotificationServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/UserNotificationServiceBean.java
index 947ee3ce989..a2a71ff8b40 100644
--- a/src/main/java/edu/harvard/iq/dataverse/UserNotificationServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/UserNotificationServiceBean.java
@@ -14,16 +14,16 @@
 import java.sql.Timestamp;
 import java.util.List;
 import java.util.logging.Logger;
-import javax.ejb.EJB;
-import javax.ejb.Stateless;
-import javax.ejb.TransactionAttribute;
-import javax.ejb.TransactionAttributeType;
-import javax.inject.Inject;
-import javax.inject.Named;
-import javax.persistence.EntityManager;
-import javax.persistence.PersistenceContext;
-import javax.persistence.Query;
-import javax.persistence.TypedQuery;
+import jakarta.ejb.EJB;
+import jakarta.ejb.Stateless;
+import jakarta.ejb.TransactionAttribute;
+import jakarta.ejb.TransactionAttributeType;
+import jakarta.inject.Inject;
+import jakarta.inject.Named;
+import jakarta.persistence.EntityManager;
+import jakarta.persistence.PersistenceContext;
+import jakarta.persistence.Query;
+import jakarta.persistence.TypedQuery;
 
 /**
  *
diff --git a/src/main/java/edu/harvard/iq/dataverse/UserServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/UserServiceBean.java
index 2d8ecf64f76..93892376edc 100644
--- a/src/main/java/edu/harvard/iq/dataverse/UserServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/UserServiceBean.java
@@ -12,16 +12,15 @@
 import java.util.Date;
 import java.util.logging.Logger;
 import java.util.stream.Collectors;
-import javax.ejb.EJB;
-import javax.ejb.Stateless;
-import javax.ejb.TransactionAttribute;
-import javax.ejb.TransactionAttributeType;
-import javax.inject.Named;
-import javax.persistence.EntityManager;
-import javax.persistence.PersistenceContext;
-import javax.persistence.Query;
+import jakarta.ejb.EJB;
+import jakarta.ejb.Stateless;
+import jakarta.ejb.TransactionAttribute;
+import jakarta.ejb.TransactionAttributeType;
+import jakarta.inject.Named;
+import jakarta.persistence.EntityManager;
+import jakarta.persistence.PersistenceContext;
+import jakarta.persistence.Query;
 import org.apache.commons.lang3.StringUtils;
-import org.ocpsoft.common.util.Strings;
 
 @Stateless
 @Named
@@ -190,7 +189,7 @@ private HashMap<String, List<String>> retrieveRolesForUsers(List<Object[]> userO
         // Add '@' to each identifier and delimit the list by ","
         // -------------------------------------------------
         String identifierListString = userIdentifierList.stream()
-                                     .filter(x -> !Strings.isNullOrEmpty(x))
+                                     .filter(x -> x != null && !x.isEmpty())
                                      .map(x -> "'@" + x + "'")
                                      .collect(Collectors.joining(", "));
 
diff --git a/src/main/java/edu/harvard/iq/dataverse/ValidateDataFileDirectoryName.java b/src/main/java/edu/harvard/iq/dataverse/ValidateDataFileDirectoryName.java
index 94e33d6220a..9e8ce42491d 100644
--- a/src/main/java/edu/harvard/iq/dataverse/ValidateDataFileDirectoryName.java
+++ b/src/main/java/edu/harvard/iq/dataverse/ValidateDataFileDirectoryName.java
@@ -10,8 +10,8 @@
 import java.lang.annotation.Retention;
 import static java.lang.annotation.RetentionPolicy.RUNTIME;
 import java.lang.annotation.Target;
-import javax.validation.Constraint;
-import javax.validation.Payload;
+import jakarta.validation.Constraint;
+import jakarta.validation.Payload;
 
 /**
  *
diff --git a/src/main/java/edu/harvard/iq/dataverse/ValidateDatasetFieldType.java b/src/main/java/edu/harvard/iq/dataverse/ValidateDatasetFieldType.java
index ae7b4a1eaef..f36a1d9541e 100644
--- a/src/main/java/edu/harvard/iq/dataverse/ValidateDatasetFieldType.java
+++ b/src/main/java/edu/harvard/iq/dataverse/ValidateDatasetFieldType.java
@@ -13,8 +13,8 @@
 import java.lang.annotation.Retention;
 import java.lang.annotation.Target;
 
-import javax.validation.Constraint;
-import javax.validation.Payload;
+import jakarta.validation.Constraint;
+import jakarta.validation.Payload;
 
 @Target({TYPE, ANNOTATION_TYPE})
 @Retention(RUNTIME)
diff --git a/src/main/java/edu/harvard/iq/dataverse/ValidateTermsOfUseAndAccess.java b/src/main/java/edu/harvard/iq/dataverse/ValidateTermsOfUseAndAccess.java
index 8717d10fc8d..f55e93af674 100644
--- a/src/main/java/edu/harvard/iq/dataverse/ValidateTermsOfUseAndAccess.java
+++ b/src/main/java/edu/harvard/iq/dataverse/ValidateTermsOfUseAndAccess.java
@@ -10,8 +10,8 @@
 import java.lang.annotation.Retention;
 import static java.lang.annotation.RetentionPolicy.RUNTIME;
 import java.lang.annotation.Target;
-import javax.validation.Constraint;
-import javax.validation.Payload;
+import jakarta.validation.Constraint;
+import jakarta.validation.Payload;
 
 /**
  *
diff --git a/src/main/java/edu/harvard/iq/dataverse/ValidateVersionNote.java b/src/main/java/edu/harvard/iq/dataverse/ValidateVersionNote.java
index 405a7feb52f..c8d64d4a642 100644
--- a/src/main/java/edu/harvard/iq/dataverse/ValidateVersionNote.java
+++ b/src/main/java/edu/harvard/iq/dataverse/ValidateVersionNote.java
@@ -11,8 +11,8 @@
 import java.lang.annotation.Retention;
 import static java.lang.annotation.RetentionPolicy.RUNTIME;
 import java.lang.annotation.Target;
-import javax.validation.Constraint;
-import javax.validation.Payload;
+import jakarta.validation.Constraint;
+import jakarta.validation.Payload;
 
 /**
  *
diff --git a/src/main/java/edu/harvard/iq/dataverse/WidgetWrapper.java b/src/main/java/edu/harvard/iq/dataverse/WidgetWrapper.java
index 743d8f2d092..a8ea5fabde4 100644
--- a/src/main/java/edu/harvard/iq/dataverse/WidgetWrapper.java
+++ b/src/main/java/edu/harvard/iq/dataverse/WidgetWrapper.java
@@ -5,10 +5,10 @@
  */
 package edu.harvard.iq.dataverse;
 
-import javax.faces.context.FacesContext;
-import javax.faces.view.ViewScoped;
-import javax.inject.Named;
-import javax.servlet.http.HttpServletResponse;
+import jakarta.faces.context.FacesContext;
+import jakarta.faces.view.ViewScoped;
+import jakarta.inject.Named;
+import jakarta.servlet.http.HttpServletResponse;
 
 /**
  *
diff --git a/src/main/java/edu/harvard/iq/dataverse/actionlogging/ActionLogRecord.java b/src/main/java/edu/harvard/iq/dataverse/actionlogging/ActionLogRecord.java
index 31a9ad25e5b..6743c3f2143 100644
--- a/src/main/java/edu/harvard/iq/dataverse/actionlogging/ActionLogRecord.java
+++ b/src/main/java/edu/harvard/iq/dataverse/actionlogging/ActionLogRecord.java
@@ -3,16 +3,16 @@
 import java.util.Date;
 import java.util.Objects;
 import java.util.UUID;
-import javax.persistence.Column;
-import javax.persistence.Entity;
-import javax.persistence.EnumType;
-import javax.persistence.Enumerated;
-import javax.persistence.Id;
-import javax.persistence.Index;
-import javax.persistence.PrePersist;
-import javax.persistence.Table;
-import javax.persistence.Temporal;
-import javax.persistence.TemporalType;
+import jakarta.persistence.Column;
+import jakarta.persistence.Entity;
+import jakarta.persistence.EnumType;
+import jakarta.persistence.Enumerated;
+import jakarta.persistence.Id;
+import jakarta.persistence.Index;
+import jakarta.persistence.PrePersist;
+import jakarta.persistence.Table;
+import jakarta.persistence.Temporal;
+import jakarta.persistence.TemporalType;
 
 /**
  * Logs a single action in the action log.
diff --git a/src/main/java/edu/harvard/iq/dataverse/actionlogging/ActionLogServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/actionlogging/ActionLogServiceBean.java
index ba19fdd9eeb..2d16f52bb09 100644
--- a/src/main/java/edu/harvard/iq/dataverse/actionlogging/ActionLogServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/actionlogging/ActionLogServiceBean.java
@@ -1,11 +1,11 @@
 package edu.harvard.iq.dataverse.actionlogging;
 
 import java.util.Date;
-import javax.ejb.Stateless;
-import javax.ejb.TransactionAttribute;
-import javax.ejb.TransactionAttributeType;
-import javax.persistence.EntityManager;
-import javax.persistence.PersistenceContext;
+import jakarta.ejb.Stateless;
+import jakarta.ejb.TransactionAttribute;
+import jakarta.ejb.TransactionAttributeType;
+import jakarta.persistence.EntityManager;
+import jakarta.persistence.PersistenceContext;
 
 /**
  * A service bean that persists {@link ActionLogRecord}s to the DB.
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/AbstractApiBean.java b/src/main/java/edu/harvard/iq/dataverse/api/AbstractApiBean.java
index 6b3e27becb7..5a4c9ab9058 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/AbstractApiBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/AbstractApiBean.java
@@ -57,24 +57,24 @@
 import java.util.concurrent.Callable;
 import java.util.logging.Level;
 import java.util.logging.Logger;
-import javax.ejb.EJB;
-import javax.ejb.EJBException;
-import javax.json.Json;
-import javax.json.JsonArray;
-import javax.json.JsonArrayBuilder;
-import javax.json.JsonObject;
-import javax.json.JsonObjectBuilder;
-import javax.json.JsonReader;
-import javax.json.JsonValue;
-import javax.json.JsonValue.ValueType;
-import javax.persistence.EntityManager;
-import javax.persistence.NoResultException;
-import javax.persistence.PersistenceContext;
-import javax.servlet.http.HttpServletRequest;
-import javax.ws.rs.container.ContainerRequestContext;
-import javax.ws.rs.core.*;
-import javax.ws.rs.core.Response.ResponseBuilder;
-import javax.ws.rs.core.Response.Status;
+import jakarta.ejb.EJB;
+import jakarta.ejb.EJBException;
+import jakarta.json.Json;
+import jakarta.json.JsonArray;
+import jakarta.json.JsonArrayBuilder;
+import jakarta.json.JsonObject;
+import jakarta.json.JsonObjectBuilder;
+import jakarta.json.JsonReader;
+import jakarta.json.JsonValue;
+import jakarta.json.JsonValue.ValueType;
+import jakarta.persistence.EntityManager;
+import jakarta.persistence.NoResultException;
+import jakarta.persistence.PersistenceContext;
+import jakarta.servlet.http.HttpServletRequest;
+import jakarta.ws.rs.container.ContainerRequestContext;
+import jakarta.ws.rs.core.*;
+import jakarta.ws.rs.core.Response.ResponseBuilder;
+import jakarta.ws.rs.core.Response.Status;
 
 import static org.apache.commons.lang3.StringUtils.isNumeric;
 
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Access.java b/src/main/java/edu/harvard/iq/dataverse/api/Access.java
index 02441a9ee11..0341f8c1127 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/Access.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/Access.java
@@ -31,7 +31,7 @@
 import edu.harvard.iq.dataverse.RoleAssignment;
 import edu.harvard.iq.dataverse.UserNotification;
 import edu.harvard.iq.dataverse.UserNotificationServiceBean;
-import static edu.harvard.iq.dataverse.api.AbstractApiBean.error;
+
 import static edu.harvard.iq.dataverse.api.Datasets.handleVersion;
 
 import edu.harvard.iq.dataverse.api.auth.AuthRequired;
@@ -73,7 +73,7 @@
 import edu.harvard.iq.dataverse.util.json.NullSafeJsonBuilder;
 
 import java.util.logging.Logger;
-import javax.ejb.EJB;
+import jakarta.ejb.EJB;
 import java.io.InputStream;
 import java.io.ByteArrayOutputStream;
 import java.io.File;
@@ -88,47 +88,46 @@
 import java.util.List;
 import java.util.Properties;
 import java.util.logging.Level;
-import javax.inject.Inject;
-import javax.json.Json;
+import jakarta.inject.Inject;
+import jakarta.json.Json;
 import java.net.URI;
-import javax.json.JsonArrayBuilder;
-import javax.persistence.TypedQuery;
-
-import javax.ws.rs.GET;
-import javax.ws.rs.Path;
-import javax.ws.rs.PathParam;
-import javax.ws.rs.Produces;
-
-import javax.ws.rs.container.ContainerRequestContext;
-import javax.ws.rs.core.Context;
-import javax.ws.rs.core.HttpHeaders;
-import javax.ws.rs.core.UriInfo;
-
-
-import javax.servlet.http.HttpServletResponse;
-import javax.ws.rs.BadRequestException;
-import javax.ws.rs.Consumes;
-import javax.ws.rs.DELETE;
-import javax.ws.rs.ForbiddenException;
-import javax.ws.rs.NotFoundException;
-import javax.ws.rs.POST;
-import javax.ws.rs.PUT;
-import javax.ws.rs.QueryParam;
-import javax.ws.rs.ServiceUnavailableException;
-import javax.ws.rs.WebApplicationException;
-import javax.ws.rs.core.Response;
-import static javax.ws.rs.core.Response.Status.BAD_REQUEST;
-import javax.ws.rs.core.StreamingOutput;
+import jakarta.json.JsonArrayBuilder;
+import jakarta.persistence.TypedQuery;
+
+import jakarta.ws.rs.GET;
+import jakarta.ws.rs.Path;
+import jakarta.ws.rs.PathParam;
+import jakarta.ws.rs.Produces;
+
+import jakarta.ws.rs.container.ContainerRequestContext;
+import jakarta.ws.rs.core.Context;
+import jakarta.ws.rs.core.HttpHeaders;
+import jakarta.ws.rs.core.UriInfo;
+
+
+import jakarta.servlet.http.HttpServletResponse;
+import jakarta.ws.rs.BadRequestException;
+import jakarta.ws.rs.Consumes;
+import jakarta.ws.rs.DELETE;
+import jakarta.ws.rs.ForbiddenException;
+import jakarta.ws.rs.NotFoundException;
+import jakarta.ws.rs.POST;
+import jakarta.ws.rs.PUT;
+import jakarta.ws.rs.QueryParam;
+import jakarta.ws.rs.ServiceUnavailableException;
+import jakarta.ws.rs.WebApplicationException;
+import jakarta.ws.rs.core.Response;
+import static jakarta.ws.rs.core.Response.Status.BAD_REQUEST;
+import jakarta.ws.rs.core.StreamingOutput;
 import static edu.harvard.iq.dataverse.util.json.JsonPrinter.json;
 import java.net.URISyntaxException;
-import java.util.stream.Collectors;
-import java.util.stream.Stream;
-import javax.json.JsonObjectBuilder;
-import javax.ws.rs.RedirectionException;
-import javax.ws.rs.ServerErrorException;
-import javax.ws.rs.core.MediaType;
-import static javax.ws.rs.core.Response.Status.FORBIDDEN;
-import static javax.ws.rs.core.Response.Status.UNAUTHORIZED;
+
+import jakarta.json.JsonObjectBuilder;
+import jakarta.ws.rs.RedirectionException;
+import jakarta.ws.rs.ServerErrorException;
+import jakarta.ws.rs.core.MediaType;
+import static jakarta.ws.rs.core.Response.Status.FORBIDDEN;
+import static jakarta.ws.rs.core.Response.Status.UNAUTHORIZED;
 import org.glassfish.jersey.media.multipart.FormDataBodyPart;
 import org.glassfish.jersey.media.multipart.FormDataParam;
 
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Admin.java b/src/main/java/edu/harvard/iq/dataverse/api/Admin.java
index 2e98388b4b6..fd3b9a89e54 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/Admin.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/Admin.java
@@ -18,7 +18,6 @@
 import edu.harvard.iq.dataverse.settings.JvmSettings;
 import edu.harvard.iq.dataverse.validation.EMailValidator;
 import edu.harvard.iq.dataverse.EjbDataverseEngine;
-import edu.harvard.iq.dataverse.GlobalId;
 import edu.harvard.iq.dataverse.HandlenetServiceBean;
 import edu.harvard.iq.dataverse.Template;
 import edu.harvard.iq.dataverse.TemplateServiceBean;
@@ -48,19 +47,19 @@
 import edu.harvard.iq.dataverse.engine.command.impl.AbstractSubmitToArchiveCommand;
 import edu.harvard.iq.dataverse.engine.command.impl.PublishDataverseCommand;
 import edu.harvard.iq.dataverse.settings.Setting;
-import javax.json.Json;
-import javax.json.JsonArrayBuilder;
-import javax.json.JsonObjectBuilder;
-import javax.ws.rs.Consumes;
-import javax.ws.rs.DELETE;
-import javax.ws.rs.GET;
-import javax.ws.rs.POST;
-import javax.ws.rs.PUT;
-import javax.ws.rs.Path;
-import javax.ws.rs.PathParam;
-import javax.ws.rs.container.ContainerRequestContext;
-import javax.ws.rs.core.Context;
-import javax.ws.rs.core.Response;
+import jakarta.json.Json;
+import jakarta.json.JsonArrayBuilder;
+import jakarta.json.JsonObjectBuilder;
+import jakarta.ws.rs.Consumes;
+import jakarta.ws.rs.DELETE;
+import jakarta.ws.rs.GET;
+import jakarta.ws.rs.POST;
+import jakarta.ws.rs.PUT;
+import jakarta.ws.rs.Path;
+import jakarta.ws.rs.PathParam;
+import jakarta.ws.rs.container.ContainerRequestContext;
+import jakarta.ws.rs.core.Context;
+import jakarta.ws.rs.core.Response;
 import static edu.harvard.iq.dataverse.util.json.NullSafeJsonBuilder.jsonObjectBuilder;
 
 import java.io.InputStream;
@@ -69,14 +68,14 @@
 import java.util.Map.Entry;
 import java.util.logging.Level;
 import java.util.logging.Logger;
-import javax.ejb.EJB;
-import javax.ejb.Stateless;
-import javax.json.JsonObject;
-import javax.json.JsonReader;
-import javax.validation.ConstraintViolation;
-import javax.validation.ConstraintViolationException;
-import javax.ws.rs.Produces;
-import javax.ws.rs.core.Response.Status;
+import jakarta.ejb.EJB;
+import jakarta.ejb.Stateless;
+import jakarta.json.JsonObject;
+import jakarta.json.JsonReader;
+import jakarta.validation.ConstraintViolation;
+import jakarta.validation.ConstraintViolationException;
+import jakarta.ws.rs.Produces;
+import jakarta.ws.rs.core.Response.Status;
 
 import org.apache.commons.io.IOUtils;
 
@@ -117,12 +116,12 @@
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Date;
-import javax.inject.Inject;
-import javax.json.JsonArray;
-import javax.persistence.Query;
-import javax.ws.rs.QueryParam;
-import javax.ws.rs.WebApplicationException;
-import javax.ws.rs.core.StreamingOutput;
+import jakarta.inject.Inject;
+import jakarta.json.JsonArray;
+import jakarta.persistence.Query;
+import jakarta.ws.rs.QueryParam;
+import jakarta.ws.rs.WebApplicationException;
+import jakarta.ws.rs.core.StreamingOutput;
 
 /**
  * Where the secure, setup API calls live.
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/ApiBlockingFilter.java b/src/main/java/edu/harvard/iq/dataverse/api/ApiBlockingFilter.java
index 6bf852d25f7..0e5b8226310 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/ApiBlockingFilter.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/ApiBlockingFilter.java
@@ -10,21 +10,22 @@
 import java.util.TreeSet;
 import java.util.logging.Level;
 import java.util.logging.Logger;
-import javax.ejb.EJB;
-import javax.servlet.FilterChain;
-import javax.servlet.FilterConfig;
-import javax.servlet.ServletException;
-import javax.servlet.ServletRequest;
-import javax.servlet.ServletResponse;
-import javax.servlet.http.HttpServletRequest;
-import javax.servlet.http.HttpServletResponse;
+import jakarta.ejb.EJB;
+import jakarta.servlet.Filter;
+import jakarta.servlet.FilterChain;
+import jakarta.servlet.FilterConfig;
+import jakarta.servlet.ServletException;
+import jakarta.servlet.ServletRequest;
+import jakarta.servlet.ServletResponse;
+import jakarta.servlet.http.HttpServletRequest;
+import jakarta.servlet.http.HttpServletResponse;
 
 
 /**
  * A web filter to block API administration calls.
  * @author michael
  */
-public class ApiBlockingFilter implements javax.servlet.Filter {
+public class ApiBlockingFilter implements Filter {
     public static final String UNBLOCK_KEY_QUERYPARAM = "unblock-key";
             
     interface BlockPolicy {
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/ApiConfiguration.java b/src/main/java/edu/harvard/iq/dataverse/api/ApiConfiguration.java
index f9ba088a4e9..d076ab8f973 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/ApiConfiguration.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/ApiConfiguration.java
@@ -1,6 +1,6 @@
 package edu.harvard.iq.dataverse.api;
 
-import javax.ws.rs.ApplicationPath;
+import jakarta.ws.rs.ApplicationPath;
 
 import edu.harvard.iq.dataverse.api.auth.AuthFilter;
 import org.glassfish.jersey.media.multipart.MultiPartFeature;
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/ApiRouter.java b/src/main/java/edu/harvard/iq/dataverse/api/ApiRouter.java
index 691afeaef20..193e1059415 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/ApiRouter.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/ApiRouter.java
@@ -1,21 +1,22 @@
 package edu.harvard.iq.dataverse.api;
 
+import jakarta.servlet.Filter;
 import java.io.IOException;
 import java.util.logging.Logger;
-import javax.servlet.FilterChain;
-import javax.servlet.FilterConfig;
-import javax.servlet.RequestDispatcher;
-import javax.servlet.ServletException;
-import javax.servlet.ServletRequest;
-import javax.servlet.ServletResponse;
-import javax.servlet.http.HttpServletRequest;
+import jakarta.servlet.FilterChain;
+import jakarta.servlet.FilterConfig;
+import jakarta.servlet.RequestDispatcher;
+import jakarta.servlet.ServletException;
+import jakarta.servlet.ServletRequest;
+import jakarta.servlet.ServletResponse;
+import jakarta.servlet.http.HttpServletRequest;
 
 /**
  * Routes API calls that don't have a version number to the latest API version
  * 
  * @author michael
  */
-public class ApiRouter implements javax.servlet.Filter {
+public class ApiRouter implements Filter {
     private static final Logger logger = Logger.getLogger(ApiRouter.class.getName());
     
     @Override
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/BatchImport.java b/src/main/java/edu/harvard/iq/dataverse/api/BatchImport.java
index bf9ce2adc5a..a2d06bff93e 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/BatchImport.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/BatchImport.java
@@ -15,16 +15,16 @@
 import edu.harvard.iq.dataverse.settings.SettingsServiceBean;
 import java.io.IOException;
 import java.io.PrintWriter;
-import javax.ejb.EJB;
-import javax.ejb.Stateless;
-import javax.json.JsonObjectBuilder;
-import javax.ws.rs.GET;
-import javax.ws.rs.POST;
-import javax.ws.rs.Path;
-import javax.ws.rs.QueryParam;
-import javax.ws.rs.container.ContainerRequestContext;
-import javax.ws.rs.core.Context;
-import javax.ws.rs.core.Response;
+import jakarta.ejb.EJB;
+import jakarta.ejb.Stateless;
+import jakarta.json.JsonObjectBuilder;
+import jakarta.ws.rs.GET;
+import jakarta.ws.rs.POST;
+import jakarta.ws.rs.Path;
+import jakarta.ws.rs.QueryParam;
+import jakarta.ws.rs.container.ContainerRequestContext;
+import jakarta.ws.rs.core.Context;
+import jakarta.ws.rs.core.Response;
 
 @Stateless
 @Path("batch")
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/BatchServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/api/BatchServiceBean.java
index 8fe58298481..daddc447117 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/BatchServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/BatchServiceBean.java
@@ -14,12 +14,12 @@
 import java.util.Date;
 import java.util.logging.Level;
 import java.util.logging.Logger;
-import javax.ejb.Asynchronous;
-import javax.ejb.EJB;
-import javax.ejb.Stateless;
-import javax.json.Json;
-import javax.json.JsonArrayBuilder;
-import javax.json.JsonObjectBuilder;
+import jakarta.ejb.Asynchronous;
+import jakarta.ejb.EJB;
+import jakarta.ejb.Stateless;
+import jakarta.json.Json;
+import jakarta.json.JsonArrayBuilder;
+import jakarta.json.JsonObjectBuilder;
 
 /**
  * EJB for kicking off big batch jobs asynchronously from the REST API  (BatchImport.java)
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/BuiltinUsers.java b/src/main/java/edu/harvard/iq/dataverse/api/BuiltinUsers.java
index 9262cc6ef46..50862bc0d35 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/BuiltinUsers.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/BuiltinUsers.java
@@ -13,25 +13,22 @@
 import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser;
 import edu.harvard.iq.dataverse.settings.SettingsServiceBean;
 import java.sql.Timestamp;
-import java.util.Calendar;
 import java.util.logging.Level;
 import java.util.logging.Logger;
-import javax.ejb.EJB;
-import javax.ejb.EJBException;
-import javax.json.Json;
-import javax.json.JsonObjectBuilder;
-import javax.ws.rs.GET;
-import javax.ws.rs.POST;
-import javax.ws.rs.Path;
-import javax.ws.rs.PathParam;
-import javax.ws.rs.QueryParam;
-import javax.ws.rs.core.Response;
-import javax.ws.rs.core.Response.Status;
+import jakarta.ejb.EJB;
+import jakarta.ejb.EJBException;
+import jakarta.json.Json;
+import jakarta.json.JsonObjectBuilder;
+import jakarta.ws.rs.GET;
+import jakarta.ws.rs.POST;
+import jakarta.ws.rs.Path;
+import jakarta.ws.rs.PathParam;
+import jakarta.ws.rs.QueryParam;
+import jakarta.ws.rs.core.Response;
+import jakarta.ws.rs.core.Response.Status;
 import java.util.Date;
 import static edu.harvard.iq.dataverse.util.json.JsonPrinter.json;
 import static edu.harvard.iq.dataverse.util.json.JsonPrinter.json;
-import static edu.harvard.iq.dataverse.util.json.JsonPrinter.json;
-import static edu.harvard.iq.dataverse.util.json.JsonPrinter.json;
 
 /**
  * REST API bean for managing {@link BuiltinUser}s.
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/BundleDownloadInstanceWriter.java b/src/main/java/edu/harvard/iq/dataverse/api/BundleDownloadInstanceWriter.java
index 7edb0ac838c..35f19375902 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/BundleDownloadInstanceWriter.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/BundleDownloadInstanceWriter.java
@@ -12,14 +12,14 @@
 import java.io.OutputStream;
 import java.io.IOException;
 
-import javax.ws.rs.InternalServerErrorException;
-import javax.ws.rs.NotFoundException;
-import javax.ws.rs.WebApplicationException;
-
-import javax.ws.rs.core.MediaType;
-import javax.ws.rs.core.MultivaluedMap;
-import javax.ws.rs.ext.MessageBodyWriter;
-import javax.ws.rs.ext.Provider;
+import jakarta.ws.rs.InternalServerErrorException;
+import jakarta.ws.rs.NotFoundException;
+import jakarta.ws.rs.WebApplicationException;
+
+import jakarta.ws.rs.core.MediaType;
+import jakarta.ws.rs.core.MultivaluedMap;
+import jakarta.ws.rs.ext.MessageBodyWriter;
+import jakarta.ws.rs.ext.Provider;
 
 import edu.harvard.iq.dataverse.DataFile;
 import edu.harvard.iq.dataverse.dataaccess.*;
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/DataTagsAPI.java b/src/main/java/edu/harvard/iq/dataverse/api/DataTagsAPI.java
index 063033d4747..d7c8bd827d1 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/DataTagsAPI.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/DataTagsAPI.java
@@ -5,18 +5,18 @@
 import java.util.Map;
 import java.util.concurrent.ConcurrentHashMap;
 import java.util.logging.Logger;
-import javax.ejb.EJB;
-import javax.ejb.Stateless;
-import javax.json.JsonObject;
-import javax.ws.rs.GET;
-import javax.ws.rs.POST;
-import javax.ws.rs.Path;
-import javax.ws.rs.PathParam;
-import javax.ws.rs.client.Client;
-import javax.ws.rs.client.ClientBuilder;
-import javax.ws.rs.client.WebTarget;
-import javax.ws.rs.core.MediaType;
-import javax.ws.rs.core.Response;
+import jakarta.ejb.EJB;
+import jakarta.ejb.Stateless;
+import jakarta.json.JsonObject;
+import jakarta.ws.rs.GET;
+import jakarta.ws.rs.POST;
+import jakarta.ws.rs.Path;
+import jakarta.ws.rs.PathParam;
+import jakarta.ws.rs.client.Client;
+import jakarta.ws.rs.client.ClientBuilder;
+import jakarta.ws.rs.client.WebTarget;
+import jakarta.ws.rs.core.MediaType;
+import jakarta.ws.rs.core.Response;
 
 /**
  *
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/DatasetFieldServiceApi.java b/src/main/java/edu/harvard/iq/dataverse/api/DatasetFieldServiceApi.java
index 4ec728a8159..00b7dfa6e36 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/DatasetFieldServiceApi.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/DatasetFieldServiceApi.java
@@ -20,18 +20,18 @@
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.List;
-import javax.ejb.EJB;
-import javax.ejb.EJBException;
-import javax.json.Json;
-import javax.json.JsonArrayBuilder;
-import javax.validation.ConstraintViolation;
-import javax.validation.ConstraintViolationException;
-import javax.ws.rs.Consumes;
-import javax.ws.rs.GET;
-import javax.ws.rs.POST;
-import javax.ws.rs.Path;
-import javax.ws.rs.PathParam;
-import javax.ws.rs.core.Response;
+import jakarta.ejb.EJB;
+import jakarta.ejb.EJBException;
+import jakarta.json.Json;
+import jakarta.json.JsonArrayBuilder;
+import jakarta.validation.ConstraintViolation;
+import jakarta.validation.ConstraintViolationException;
+import jakarta.ws.rs.Consumes;
+import jakarta.ws.rs.GET;
+import jakarta.ws.rs.POST;
+import jakarta.ws.rs.Path;
+import jakarta.ws.rs.PathParam;
+import jakarta.ws.rs.core.Response;
 
 import edu.harvard.iq.dataverse.util.BundleUtil;
 import edu.harvard.iq.dataverse.util.ConstraintViolationUtil;
@@ -41,9 +41,9 @@
 
 import java.util.logging.Level;
 import java.util.logging.Logger;
-import javax.persistence.NoResultException;
-import javax.persistence.TypedQuery;
-import javax.ws.rs.core.Response.Status;
+import jakarta.persistence.NoResultException;
+import jakarta.persistence.TypedQuery;
+import jakarta.ws.rs.core.Response.Status;
 
 import java.io.BufferedInputStream;
 import java.io.FileOutputStream;
@@ -207,7 +207,7 @@ public Response showControlledVocabularyForSubject() {
     @GET
     @Path("loadNAControlledVocabularyValue")
     public Response loadNAControlledVocabularyValue() {
-        // the find will throw a javax.persistence.NoResultException if no values are in db
+        // the find will throw a NoResultException if no values are in db
 //            datasetFieldService.findNAControlledVocabularyValue();
         TypedQuery<ControlledVocabularyValue> naValueFinder = em.createQuery("SELECT OBJECT(o) FROM ControlledVocabularyValue AS o WHERE o.datasetFieldType is null AND o.strValue = :strvalue", ControlledVocabularyValue.class);
         naValueFinder.setParameter("strvalue", DatasetField.NA_VALUE);
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java
index dcd7eacf50b..dbea63cb1c8 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java
@@ -63,9 +63,7 @@
 import edu.harvard.iq.dataverse.externaltools.ExternalToolHandler;
 import edu.harvard.iq.dataverse.ingest.IngestServiceBean;
 import edu.harvard.iq.dataverse.privateurl.PrivateUrl;
-import edu.harvard.iq.dataverse.api.AbstractApiBean.WrappedResponse;
 import edu.harvard.iq.dataverse.api.dto.RoleAssignmentDTO;
-import edu.harvard.iq.dataverse.batch.util.LoggingUtil;
 import edu.harvard.iq.dataverse.dataaccess.DataAccess;
 import edu.harvard.iq.dataverse.dataaccess.ImageThumbConverter;
 import edu.harvard.iq.dataverse.dataaccess.S3AccessIO;
@@ -85,7 +83,6 @@
 import edu.harvard.iq.dataverse.makedatacount.MakeDataCountUtil;
 import edu.harvard.iq.dataverse.privateurl.PrivateUrlServiceBean;
 import edu.harvard.iq.dataverse.settings.SettingsServiceBean;
-import edu.harvard.iq.dataverse.settings.SettingsServiceBean.Key;
 import edu.harvard.iq.dataverse.util.ArchiverUtil;
 import edu.harvard.iq.dataverse.util.BundleUtil;
 import edu.harvard.iq.dataverse.util.EjbUtil;
@@ -96,7 +93,6 @@
 import edu.harvard.iq.dataverse.util.json.JSONLDUtil;
 import edu.harvard.iq.dataverse.util.json.JsonLDTerm;
 import edu.harvard.iq.dataverse.util.json.JsonParseException;
-import edu.harvard.iq.dataverse.util.json.JsonPrinter;
 import edu.harvard.iq.dataverse.util.SignpostingResources;
 import edu.harvard.iq.dataverse.util.json.JsonUtil;
 import edu.harvard.iq.dataverse.search.IndexServiceBean;
@@ -131,32 +127,31 @@
 import java.util.regex.Pattern;
 import java.util.stream.Collectors;
 
-import javax.ejb.EJB;
-import javax.ejb.EJBException;
-import javax.inject.Inject;
-import javax.json.*;
-import javax.json.stream.JsonParsingException;
-import javax.servlet.http.HttpServletRequest;
-import javax.servlet.http.HttpServletResponse;
-import javax.ws.rs.BadRequestException;
-import javax.ws.rs.Consumes;
-import javax.ws.rs.DELETE;
-import javax.ws.rs.DefaultValue;
-import javax.ws.rs.GET;
-import javax.ws.rs.NotAcceptableException;
-import javax.ws.rs.POST;
-import javax.ws.rs.PUT;
-import javax.ws.rs.Path;
-import javax.ws.rs.PathParam;
-import javax.ws.rs.Produces;
-import javax.ws.rs.QueryParam;
-import javax.ws.rs.container.ContainerRequestContext;
-import javax.ws.rs.core.*;
-import javax.ws.rs.core.Response.Status;
-import static javax.ws.rs.core.Response.Status.BAD_REQUEST;
+import jakarta.ejb.EJB;
+import jakarta.ejb.EJBException;
+import jakarta.inject.Inject;
+import jakarta.json.*;
+import jakarta.json.stream.JsonParsingException;
+import jakarta.servlet.http.HttpServletRequest;
+import jakarta.servlet.http.HttpServletResponse;
+import jakarta.ws.rs.BadRequestException;
+import jakarta.ws.rs.Consumes;
+import jakarta.ws.rs.DELETE;
+import jakarta.ws.rs.DefaultValue;
+import jakarta.ws.rs.GET;
+import jakarta.ws.rs.NotAcceptableException;
+import jakarta.ws.rs.POST;
+import jakarta.ws.rs.PUT;
+import jakarta.ws.rs.Path;
+import jakarta.ws.rs.PathParam;
+import jakarta.ws.rs.Produces;
+import jakarta.ws.rs.QueryParam;
+import jakarta.ws.rs.container.ContainerRequestContext;
+import jakarta.ws.rs.core.*;
+import jakarta.ws.rs.core.Response.Status;
+import static jakarta.ws.rs.core.Response.Status.BAD_REQUEST;
 
 import org.apache.commons.lang3.StringUtils;
-import org.apache.solr.client.solrj.SolrServerException;
 import org.glassfish.jersey.media.multipart.FormDataBodyPart;
 import org.glassfish.jersey.media.multipart.FormDataContentDisposition;
 import org.glassfish.jersey.media.multipart.FormDataParam;
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java b/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java
index 5214e3fbdcb..a60775cbd38 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java
@@ -2,8 +2,6 @@
 
 import edu.harvard.iq.dataverse.DataFile;
 import edu.harvard.iq.dataverse.Dataset;
-import edu.harvard.iq.dataverse.DatasetAuthor;
-import edu.harvard.iq.dataverse.DatasetField;
 import edu.harvard.iq.dataverse.DatasetFieldType;
 import edu.harvard.iq.dataverse.DatasetVersion;
 import edu.harvard.iq.dataverse.Dataverse;
@@ -22,9 +20,7 @@
 import edu.harvard.iq.dataverse.GuestbookServiceBean;
 import edu.harvard.iq.dataverse.MetadataBlock;
 import edu.harvard.iq.dataverse.RoleAssignment;
-import edu.harvard.iq.dataverse.DatasetFieldType.FieldType;
 
-import static edu.harvard.iq.dataverse.api.AbstractApiBean.error;
 import edu.harvard.iq.dataverse.api.dto.ExplicitGroupDTO;
 import edu.harvard.iq.dataverse.api.dto.RoleAssignmentDTO;
 import edu.harvard.iq.dataverse.api.dto.RoleDTO;
@@ -36,7 +32,6 @@
 import edu.harvard.iq.dataverse.authorization.groups.impl.explicit.ExplicitGroupProvider;
 import edu.harvard.iq.dataverse.authorization.groups.impl.explicit.ExplicitGroupServiceBean;
 import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser;
-import edu.harvard.iq.dataverse.authorization.users.PrivateUrlUser;
 import edu.harvard.iq.dataverse.authorization.users.User;
 import edu.harvard.iq.dataverse.dataverse.DataverseUtil;
 import edu.harvard.iq.dataverse.engine.command.DataverseRequest;
@@ -91,33 +86,33 @@
 import java.util.TreeSet;
 import java.util.logging.Level;
 import java.util.logging.Logger;
-import javax.ejb.EJB;
-import javax.ejb.EJBException;
-import javax.ejb.Stateless;
-import javax.json.Json;
-import javax.json.JsonArrayBuilder;
-import javax.json.JsonNumber;
-import javax.json.JsonObject;
-import javax.json.JsonObjectBuilder;
-import javax.json.JsonString;
-import javax.json.JsonValue;
-import javax.json.JsonValue.ValueType;
-import javax.json.stream.JsonParsingException;
-import javax.validation.ConstraintViolationException;
-import javax.ws.rs.BadRequestException;
-import javax.ws.rs.Consumes;
-import javax.ws.rs.DELETE;
-import javax.ws.rs.GET;
-import javax.ws.rs.POST;
-import javax.ws.rs.PUT;
-import javax.ws.rs.Path;
-import javax.ws.rs.PathParam;
-import javax.ws.rs.Produces;
-import javax.ws.rs.QueryParam;
-import javax.ws.rs.container.ContainerRequestContext;
-import javax.ws.rs.core.MediaType;
-import javax.ws.rs.core.Response;
-import javax.ws.rs.core.Response.Status;
+import jakarta.ejb.EJB;
+import jakarta.ejb.EJBException;
+import jakarta.ejb.Stateless;
+import jakarta.json.Json;
+import jakarta.json.JsonArrayBuilder;
+import jakarta.json.JsonNumber;
+import jakarta.json.JsonObject;
+import jakarta.json.JsonObjectBuilder;
+import jakarta.json.JsonString;
+import jakarta.json.JsonValue;
+import jakarta.json.JsonValue.ValueType;
+import jakarta.json.stream.JsonParsingException;
+import jakarta.validation.ConstraintViolationException;
+import jakarta.ws.rs.BadRequestException;
+import jakarta.ws.rs.Consumes;
+import jakarta.ws.rs.DELETE;
+import jakarta.ws.rs.GET;
+import jakarta.ws.rs.POST;
+import jakarta.ws.rs.PUT;
+import jakarta.ws.rs.Path;
+import jakarta.ws.rs.PathParam;
+import jakarta.ws.rs.Produces;
+import jakarta.ws.rs.QueryParam;
+import jakarta.ws.rs.container.ContainerRequestContext;
+import jakarta.ws.rs.core.MediaType;
+import jakarta.ws.rs.core.Response;
+import jakarta.ws.rs.core.Response.Status;
 import static edu.harvard.iq.dataverse.util.json.JsonPrinter.toJsonArray;
 import static edu.harvard.iq.dataverse.util.json.JsonPrinter.json;
 import java.io.IOException;
@@ -129,11 +124,11 @@
 import java.util.Map;
 import java.util.Optional;
 import java.util.stream.Collectors;
-import javax.servlet.http.HttpServletResponse;
-import javax.validation.constraints.NotNull;
-import javax.ws.rs.WebApplicationException;
-import javax.ws.rs.core.Context;
-import javax.ws.rs.core.StreamingOutput;
+import jakarta.servlet.http.HttpServletResponse;
+import jakarta.validation.constraints.NotNull;
+import jakarta.ws.rs.WebApplicationException;
+import jakarta.ws.rs.core.Context;
+import jakarta.ws.rs.core.StreamingOutput;
 import javax.xml.stream.XMLStreamException;
 
 /**
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/DownloadInstance.java b/src/main/java/edu/harvard/iq/dataverse/api/DownloadInstance.java
index c9eb3638b90..e9f869ad8b9 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/DownloadInstance.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/DownloadInstance.java
@@ -14,9 +14,9 @@
 import java.util.logging.Logger;
 
 import edu.harvard.iq.dataverse.dataaccess.OptionalAccessService;
-import javax.faces.context.FacesContext;
-import javax.ws.rs.core.HttpHeaders;
-import javax.ws.rs.core.UriInfo;
+import jakarta.faces.context.FacesContext;
+import jakarta.ws.rs.core.HttpHeaders;
+import jakarta.ws.rs.core.UriInfo;
 
 /**
  *
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/DownloadInstanceWriter.java b/src/main/java/edu/harvard/iq/dataverse/api/DownloadInstanceWriter.java
index 2410da04072..af681234e82 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/DownloadInstanceWriter.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/DownloadInstanceWriter.java
@@ -12,14 +12,14 @@
 import java.io.OutputStream;
 import java.io.IOException;
 
-import javax.ws.rs.WebApplicationException;
+import jakarta.ws.rs.WebApplicationException;
 
-import javax.ws.rs.core.MediaType;
-import javax.ws.rs.core.MultivaluedMap;
-import javax.ws.rs.core.Response;
+import jakarta.ws.rs.core.MediaType;
+import jakarta.ws.rs.core.MultivaluedMap;
+import jakarta.ws.rs.core.Response;
 
-import javax.ws.rs.ext.MessageBodyWriter;
-import javax.ws.rs.ext.Provider;
+import jakarta.ws.rs.ext.MessageBodyWriter;
+import jakarta.ws.rs.ext.Provider;
 
 import edu.harvard.iq.dataverse.DataFile;
 import edu.harvard.iq.dataverse.dataaccess.*;
@@ -43,12 +43,12 @@
 import java.util.List;
 import java.util.logging.Level;
 import java.util.logging.Logger;
-import javax.inject.Inject;
-import javax.ws.rs.ClientErrorException;
-import javax.ws.rs.NotFoundException;
-import javax.ws.rs.RedirectionException;
-import javax.ws.rs.ServiceUnavailableException;
-import javax.ws.rs.core.HttpHeaders;
+import jakarta.inject.Inject;
+import jakarta.ws.rs.ClientErrorException;
+import jakarta.ws.rs.NotFoundException;
+import jakarta.ws.rs.RedirectionException;
+import jakarta.ws.rs.ServiceUnavailableException;
+import jakarta.ws.rs.core.HttpHeaders;
 import org.apache.tika.mime.MimeType;
 import org.apache.tika.mime.MimeTypeException;
 import org.apache.tika.mime.MimeTypes;
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/EditDDI.java b/src/main/java/edu/harvard/iq/dataverse/api/EditDDI.java
index 3960fe4e996..1b74ab5479e 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/EditDDI.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/EditDDI.java
@@ -2,9 +2,7 @@
 
 import edu.harvard.iq.dataverse.api.auth.AuthRequired;
 import edu.harvard.iq.dataverse.authorization.Permission;
-import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser;
 import edu.harvard.iq.dataverse.authorization.users.User;
-import edu.harvard.iq.dataverse.batch.util.LoggingUtil;
 import edu.harvard.iq.dataverse.engine.command.DataverseRequest;
 import edu.harvard.iq.dataverse.engine.command.exception.CommandException;
 import edu.harvard.iq.dataverse.engine.command.impl.UpdateDatasetVersionCommand;
@@ -28,28 +26,25 @@
 import edu.harvard.iq.dataverse.datavariable.VariableCategory;
 import edu.harvard.iq.dataverse.datavariable.VariableMetadataDDIParser;
 import edu.harvard.iq.dataverse.search.IndexServiceBean;
-import org.apache.solr.client.solrj.SolrServerException;
-
-import javax.ejb.EJB;
-import javax.ejb.EJBException;
-import javax.ejb.Stateless;
-import javax.inject.Inject;
-import javax.persistence.EntityManager;
-import javax.persistence.PersistenceContext;
-import javax.ws.rs.container.ContainerRequestContext;
-import javax.ws.rs.core.Response;
-import javax.ws.rs.core.Context;
-import javax.ws.rs.Path;
-import javax.ws.rs.PUT;
-import javax.ws.rs.Consumes;
-import javax.ws.rs.PathParam;
+
+import jakarta.ejb.EJB;
+import jakarta.ejb.EJBException;
+import jakarta.ejb.Stateless;
+import jakarta.inject.Inject;
+import jakarta.persistence.EntityManager;
+import jakarta.persistence.PersistenceContext;
+import jakarta.ws.rs.container.ContainerRequestContext;
+import jakarta.ws.rs.core.Context;
+import jakarta.ws.rs.core.Response;
+import jakarta.ws.rs.Path;
+import jakarta.ws.rs.PUT;
+import jakarta.ws.rs.Consumes;
+import jakarta.ws.rs.PathParam;
 import javax.xml.stream.XMLStreamException;
 import javax.xml.stream.XMLInputFactory;
 import javax.xml.stream.XMLStreamReader;
-import java.io.IOException;
 import java.io.InputStream;
 
-import java.util.concurrent.Future;
 import java.util.logging.Level;
 import java.util.logging.Logger;
 import java.util.List;
@@ -57,11 +52,9 @@
 import java.util.Map;
 import java.util.HashMap;
 import java.util.Collection;
-import java.util.Date;
-import java.sql.Timestamp;
 
 
-import javax.validation.ConstraintViolationException;
+import jakarta.validation.ConstraintViolationException;
 
 @Stateless
 @Path("edit")
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/ExternalTools.java b/src/main/java/edu/harvard/iq/dataverse/api/ExternalTools.java
index e53b54482b8..1feac1141bb 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/ExternalTools.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/ExternalTools.java
@@ -4,15 +4,15 @@
 import edu.harvard.iq.dataverse.externaltools.ExternalTool;
 import edu.harvard.iq.dataverse.externaltools.ExternalToolServiceBean;
 import java.util.logging.Logger;
-import javax.json.Json;
-import javax.json.JsonArrayBuilder;
-import javax.ws.rs.DELETE;
-import javax.ws.rs.GET;
-import javax.ws.rs.POST;
-import javax.ws.rs.Path;
-import javax.ws.rs.PathParam;
-import javax.ws.rs.core.Response;
-import static javax.ws.rs.core.Response.Status.BAD_REQUEST;
+import jakarta.json.Json;
+import jakarta.json.JsonArrayBuilder;
+import jakarta.ws.rs.DELETE;
+import jakarta.ws.rs.GET;
+import jakarta.ws.rs.POST;
+import jakarta.ws.rs.Path;
+import jakarta.ws.rs.PathParam;
+import jakarta.ws.rs.core.Response;
+import static jakarta.ws.rs.core.Response.Status.BAD_REQUEST;
 
 @Path("admin/externalTools")
 public class ExternalTools extends AbstractApiBean {
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/FeedbackApi.java b/src/main/java/edu/harvard/iq/dataverse/api/FeedbackApi.java
index 53829cf09cc..8a178f8da62 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/FeedbackApi.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/FeedbackApi.java
@@ -11,17 +11,17 @@
 import edu.harvard.iq.dataverse.settings.SettingsServiceBean;
 import edu.harvard.iq.dataverse.util.MailUtil;
 
-import javax.ejb.EJB;
-import javax.json.Json;
-import javax.json.JsonArrayBuilder;
-import javax.json.JsonNumber;
-import javax.json.JsonObject;
-import javax.mail.internet.AddressException;
-import javax.mail.internet.InternetAddress;
-import javax.ws.rs.POST;
-import javax.ws.rs.Path;
-import javax.ws.rs.core.Response;
-import javax.ws.rs.core.Response.Status;
+import jakarta.ejb.EJB;
+import jakarta.json.Json;
+import jakarta.json.JsonArrayBuilder;
+import jakarta.json.JsonNumber;
+import jakarta.json.JsonObject;
+import jakarta.mail.internet.AddressException;
+import jakarta.mail.internet.InternetAddress;
+import jakarta.ws.rs.POST;
+import jakarta.ws.rs.Path;
+import jakarta.ws.rs.core.Response;
+import jakarta.ws.rs.core.Response.Status;
 
 @Path("admin/feedback")
 public class FeedbackApi extends AbstractApiBean {
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Files.java b/src/main/java/edu/harvard/iq/dataverse/api/Files.java
index f6eda085c95..3324523afbc 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/Files.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/Files.java
@@ -55,26 +55,26 @@
 import java.util.List;
 import java.util.logging.Level;
 import java.util.logging.Logger;
-import javax.ejb.EJB;
-import javax.ejb.EJBException;
-import javax.inject.Inject;
-import javax.json.Json;
-import javax.servlet.http.HttpServletResponse;
-import javax.ws.rs.Consumes;
-import javax.ws.rs.DELETE;
-import javax.ws.rs.GET;
-import javax.ws.rs.POST;
-import javax.ws.rs.PUT;
-import javax.ws.rs.Path;
-import javax.ws.rs.PathParam;
-import javax.ws.rs.QueryParam;
-import javax.ws.rs.container.ContainerRequestContext;
-import javax.ws.rs.core.Context;
-import javax.ws.rs.core.HttpHeaders;
-import javax.ws.rs.core.MediaType;
-import javax.ws.rs.core.Response;
-import static javax.ws.rs.core.Response.Status.BAD_REQUEST;
-import javax.ws.rs.core.UriInfo;
+import jakarta.ejb.EJB;
+import jakarta.ejb.EJBException;
+import jakarta.inject.Inject;
+import jakarta.json.Json;
+import jakarta.servlet.http.HttpServletResponse;
+import jakarta.ws.rs.Consumes;
+import jakarta.ws.rs.DELETE;
+import jakarta.ws.rs.GET;
+import jakarta.ws.rs.POST;
+import jakarta.ws.rs.PUT;
+import jakarta.ws.rs.Path;
+import jakarta.ws.rs.PathParam;
+import jakarta.ws.rs.QueryParam;
+import jakarta.ws.rs.container.ContainerRequestContext;
+import jakarta.ws.rs.core.Context;
+import jakarta.ws.rs.core.HttpHeaders;
+import jakarta.ws.rs.core.MediaType;
+import jakarta.ws.rs.core.Response;
+import static jakarta.ws.rs.core.Response.Status.BAD_REQUEST;
+import jakarta.ws.rs.core.UriInfo;
 import org.glassfish.jersey.media.multipart.FormDataBodyPart;
 import org.glassfish.jersey.media.multipart.FormDataContentDisposition;
 import org.glassfish.jersey.media.multipart.FormDataParam;
@@ -448,7 +448,7 @@ public Response updateFileMetadata(@Context ContainerRequestContext crc, @FormDa
                     return error(Response.Status.BAD_REQUEST, "An error has occurred attempting to update the requested DataFile. It is not part of the current version of the Dataset.");
                 }
 
-                javax.json.JsonObject jsonObject = JsonUtil.getJsonObject(jsonData);
+                jakarta.json.JsonObject jsonObject = JsonUtil.getJsonObject(jsonData);
                 String incomingLabel = jsonObject.getString("label", null);
                 String incomingDirectoryLabel = jsonObject.getString("directoryLabel", null);
                 String existingLabel = df.getFileMetadata().getLabel();
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Groups.java b/src/main/java/edu/harvard/iq/dataverse/api/Groups.java
index 5a587efadf3..d56a787c7ff 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/Groups.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/Groups.java
@@ -9,26 +9,26 @@
 import edu.harvard.iq.dataverse.util.json.JsonParseException;
 import edu.harvard.iq.dataverse.util.json.JsonParser;
 
-import javax.ejb.Stateless;
-import javax.interceptor.Interceptors;
-import javax.ws.rs.GET;
-import javax.ws.rs.Path;
-import javax.ws.rs.core.Response;
+import jakarta.ejb.Stateless;
+import jakarta.interceptor.Interceptors;
+import jakarta.ws.rs.GET;
+import jakarta.ws.rs.Path;
+import jakarta.ws.rs.core.Response;
 import static edu.harvard.iq.dataverse.util.json.JsonPrinter.*;
 
 import java.util.Optional;
 import java.util.logging.Level;
 import java.util.logging.Logger;
 import java.util.regex.Pattern;
-import javax.annotation.PostConstruct;
-import javax.json.Json;
-import javax.json.JsonArrayBuilder;
-import javax.json.JsonObject;
-import javax.json.JsonString;
-import javax.ws.rs.DELETE;
-import javax.ws.rs.POST;
-import javax.ws.rs.PUT;
-import javax.ws.rs.PathParam;
+import jakarta.annotation.PostConstruct;
+import jakarta.json.Json;
+import jakarta.json.JsonArrayBuilder;
+import jakarta.json.JsonObject;
+import jakarta.json.JsonString;
+import jakarta.ws.rs.DELETE;
+import jakarta.ws.rs.POST;
+import jakarta.ws.rs.PUT;
+import jakarta.ws.rs.PathParam;
 import static org.apache.commons.lang3.StringUtils.isNumeric;
 
 /**
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/HarvestingClients.java b/src/main/java/edu/harvard/iq/dataverse/api/HarvestingClients.java
index e739b1520a0..d7eec9f5757 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/HarvestingClients.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/HarvestingClients.java
@@ -1,7 +1,6 @@
 package edu.harvard.iq.dataverse.api;
 
 import edu.harvard.iq.dataverse.Dataverse;
-import edu.harvard.iq.dataverse.DataverseServiceBean;
 import edu.harvard.iq.dataverse.api.auth.AuthRequired;
 import edu.harvard.iq.dataverse.harvest.client.HarvestingClient;
 
@@ -17,27 +16,27 @@
 import edu.harvard.iq.dataverse.util.StringUtil;
 import edu.harvard.iq.dataverse.util.json.JsonParseException;
 import edu.harvard.iq.dataverse.util.json.JsonPrinter;
-import javax.json.JsonObjectBuilder;
+import jakarta.json.JsonObjectBuilder;
 import static edu.harvard.iq.dataverse.util.json.NullSafeJsonBuilder.jsonObjectBuilder;
 import java.io.IOException;
 import java.io.StringReader;
 import java.util.ArrayList;
 import java.util.List;
 import java.util.logging.Logger;
-import javax.ejb.EJB;
-import javax.json.Json;
-import javax.json.JsonArrayBuilder;
-import javax.json.JsonObject;
-import javax.ws.rs.DELETE;
-import javax.ws.rs.GET;
-import javax.ws.rs.POST;
-import javax.ws.rs.PUT;
-import javax.ws.rs.Path;
-import javax.ws.rs.PathParam;
-import javax.ws.rs.QueryParam;
-import javax.ws.rs.container.ContainerRequestContext;
-import javax.ws.rs.core.Context;
-import javax.ws.rs.core.Response;
+import jakarta.ejb.EJB;
+import jakarta.json.Json;
+import jakarta.json.JsonArrayBuilder;
+import jakarta.json.JsonObject;
+import jakarta.ws.rs.DELETE;
+import jakarta.ws.rs.GET;
+import jakarta.ws.rs.POST;
+import jakarta.ws.rs.PUT;
+import jakarta.ws.rs.Path;
+import jakarta.ws.rs.PathParam;
+import jakarta.ws.rs.QueryParam;
+import jakarta.ws.rs.container.ContainerRequestContext;
+import jakarta.ws.rs.core.Context;
+import jakarta.ws.rs.core.Response;
 
 @Path("harvest/clients")
 public class HarvestingClients extends AbstractApiBean {
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/HarvestingServer.java b/src/main/java/edu/harvard/iq/dataverse/api/HarvestingServer.java
index f5e3e669083..308b910c425 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/HarvestingServer.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/HarvestingServer.java
@@ -11,29 +11,29 @@
 import edu.harvard.iq.dataverse.harvest.server.OAISetServiceBean;
 import edu.harvard.iq.dataverse.util.BundleUtil;
 import edu.harvard.iq.dataverse.util.json.JsonParseException;
-import javax.json.JsonObjectBuilder;
+import jakarta.json.JsonObjectBuilder;
 import static edu.harvard.iq.dataverse.util.json.NullSafeJsonBuilder.jsonObjectBuilder;
 import java.io.IOException;
 import java.io.StringReader;
 import java.util.List;
 import java.util.logging.Logger;
 import java.util.regex.Pattern;
-import javax.ejb.EJB;
-import javax.ejb.Stateless;
-import javax.json.Json;
-import javax.json.JsonReader;
-import javax.json.JsonArrayBuilder;
-import javax.json.JsonObject;
-import javax.ws.rs.DELETE;
-import javax.ws.rs.GET;
-import javax.ws.rs.POST;
-import javax.ws.rs.PUT;
-import javax.ws.rs.Path;
-import javax.ws.rs.PathParam;
-import javax.ws.rs.QueryParam;
-import javax.ws.rs.container.ContainerRequestContext;
-import javax.ws.rs.core.Context;
-import javax.ws.rs.core.Response;
+import jakarta.ejb.EJB;
+import jakarta.ejb.Stateless;
+import jakarta.json.Json;
+import jakarta.json.JsonReader;
+import jakarta.json.JsonArrayBuilder;
+import jakarta.json.JsonObject;
+import jakarta.ws.rs.DELETE;
+import jakarta.ws.rs.GET;
+import jakarta.ws.rs.POST;
+import jakarta.ws.rs.PUT;
+import jakarta.ws.rs.Path;
+import jakarta.ws.rs.PathParam;
+import jakarta.ws.rs.QueryParam;
+import jakarta.ws.rs.container.ContainerRequestContext;
+import jakarta.ws.rs.core.Context;
+import jakarta.ws.rs.core.Response;
 import org.apache.commons.lang3.StringUtils;
 
 /**
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Index.java b/src/main/java/edu/harvard/iq/dataverse/api/Index.java
index 1361de8fbf7..4910c460b6a 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/Index.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/Index.java
@@ -47,26 +47,24 @@
 import java.util.Set;
 import java.util.concurrent.ExecutionException;
 import java.util.concurrent.Future;
-import java.util.concurrent.TimeUnit;
-import java.util.concurrent.TimeoutException;
 import java.util.logging.Logger;
-import javax.ejb.EJB;
-import javax.ejb.EJBException;
-import javax.json.Json;
-import javax.json.JsonArrayBuilder;
-import javax.json.JsonObject;
-import javax.json.JsonObjectBuilder;
-import javax.validation.ConstraintViolation;
-import javax.validation.ConstraintViolationException;
-import javax.ws.rs.DELETE;
-import javax.ws.rs.GET;
-import javax.ws.rs.Path;
-import javax.ws.rs.PathParam;
-import javax.ws.rs.QueryParam;
-import javax.ws.rs.container.ContainerRequestContext;
-import javax.ws.rs.core.Context;
-import javax.ws.rs.core.Response;
-import javax.ws.rs.core.Response.Status;
+import jakarta.ejb.EJB;
+import jakarta.ejb.EJBException;
+import jakarta.json.Json;
+import jakarta.json.JsonArrayBuilder;
+import jakarta.json.JsonObject;
+import jakarta.json.JsonObjectBuilder;
+import jakarta.validation.ConstraintViolation;
+import jakarta.validation.ConstraintViolationException;
+import jakarta.ws.rs.DELETE;
+import jakarta.ws.rs.GET;
+import jakarta.ws.rs.Path;
+import jakarta.ws.rs.PathParam;
+import jakarta.ws.rs.QueryParam;
+import jakarta.ws.rs.container.ContainerRequestContext;
+import jakarta.ws.rs.core.Context;
+import jakarta.ws.rs.core.Response;
+import jakarta.ws.rs.core.Response.Status;
 import org.apache.solr.client.solrj.SolrServerException;
 
 @Path("admin/index")
@@ -199,7 +197,7 @@ private Response indexAllOrSubset(Long numPartitionsSelected, Long partitionIdTo
                     }
                 }
             }
-            if (sb.toString().equals("javax.ejb.EJBException: Transaction aborted javax.transaction.RollbackException java.lang.IllegalStateException ")) {
+            if (sb.toString().contains("java.lang.IllegalStateException ")) {
                 return ok("indexing went as well as can be expected... got java.lang.IllegalStateException but some indexing may have happened anyway");
             } else {
                 return error(Status.INTERNAL_SERVER_ERROR, sb.toString());
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Info.java b/src/main/java/edu/harvard/iq/dataverse/api/Info.java
index cfee582f69d..3349c34dfcc 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/Info.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/Info.java
@@ -4,14 +4,14 @@
 import edu.harvard.iq.dataverse.settings.JvmSettings;
 import edu.harvard.iq.dataverse.settings.SettingsServiceBean;
 import edu.harvard.iq.dataverse.util.SystemConfig;
-import javax.ejb.EJB;
-import javax.json.Json;
-import javax.json.JsonValue;
-import javax.ws.rs.GET;
-import javax.ws.rs.Path;
-import javax.ws.rs.container.ContainerRequestContext;
-import javax.ws.rs.core.Context;
-import javax.ws.rs.core.Response;
+import jakarta.ejb.EJB;
+import jakarta.json.Json;
+import jakarta.json.JsonValue;
+import jakarta.ws.rs.GET;
+import jakarta.ws.rs.Path;
+import jakarta.ws.rs.container.ContainerRequestContext;
+import jakarta.ws.rs.core.Context;
+import jakarta.ws.rs.core.Response;
 
 @Path("info")
 public class Info extends AbstractApiBean {
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/LDNInbox.java b/src/main/java/edu/harvard/iq/dataverse/api/LDNInbox.java
index 3b725468161..05d12f1083c 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/LDNInbox.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/LDNInbox.java
@@ -28,20 +28,20 @@
 import java.sql.Timestamp;
 import java.util.logging.Logger;
 
-import javax.ejb.EJB;
-import javax.json.Json;
-import javax.json.JsonObject;
-import javax.json.JsonValue;
-import javax.json.JsonWriter;
-import javax.servlet.http.HttpServletRequest;
-import javax.ws.rs.BadRequestException;
-import javax.ws.rs.ServiceUnavailableException;
-import javax.ws.rs.Consumes;
-import javax.ws.rs.ForbiddenException;
-import javax.ws.rs.POST;
-import javax.ws.rs.Path;
-import javax.ws.rs.core.Context;
-import javax.ws.rs.core.Response;
+import jakarta.ejb.EJB;
+import jakarta.json.Json;
+import jakarta.json.JsonObject;
+import jakarta.json.JsonValue;
+import jakarta.json.JsonWriter;
+import jakarta.servlet.http.HttpServletRequest;
+import jakarta.ws.rs.BadRequestException;
+import jakarta.ws.rs.ServiceUnavailableException;
+import jakarta.ws.rs.Consumes;
+import jakarta.ws.rs.ForbiddenException;
+import jakarta.ws.rs.POST;
+import jakarta.ws.rs.Path;
+import jakarta.ws.rs.core.Context;
+import jakarta.ws.rs.core.Response;
 
 @Path("inbox")
 public class LDNInbox extends AbstractApiBean {
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Licenses.java b/src/main/java/edu/harvard/iq/dataverse/api/Licenses.java
index a9d7eb8024c..ab50ebbf2e4 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/Licenses.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/Licenses.java
@@ -2,20 +2,20 @@
 
 import edu.harvard.iq.dataverse.actionlogging.ActionLogRecord;
 
-import javax.json.Json;
-import javax.json.JsonArrayBuilder;
-import javax.ws.rs.DELETE;
-import javax.ws.rs.GET;
-import javax.ws.rs.POST;
-import javax.ws.rs.PUT;
-import javax.ws.rs.Path;
-import javax.ws.rs.PathParam;
-import javax.ws.rs.container.ContainerRequestContext;
-import javax.ws.rs.core.Context;
-import javax.ws.rs.core.Response;
+import jakarta.json.Json;
+import jakarta.json.JsonArrayBuilder;
+import jakarta.ws.rs.DELETE;
+import jakarta.ws.rs.GET;
+import jakarta.ws.rs.POST;
+import jakarta.ws.rs.PUT;
+import jakarta.ws.rs.Path;
+import jakarta.ws.rs.PathParam;
+import jakarta.ws.rs.container.ContainerRequestContext;
+import jakarta.ws.rs.core.Context;
+import jakarta.ws.rs.core.Response;
 import java.util.logging.Logger;
-import javax.ejb.Stateless;
-import javax.ws.rs.core.Response.Status;
+import jakarta.ejb.Stateless;
+import jakarta.ws.rs.core.Response.Status;
 
 import edu.harvard.iq.dataverse.api.auth.AuthRequired;
 import edu.harvard.iq.dataverse.authorization.users.User;
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Logout.java b/src/main/java/edu/harvard/iq/dataverse/api/Logout.java
index d6d8d5cdc44..e8d8be04459 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/Logout.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/Logout.java
@@ -4,10 +4,10 @@
 import edu.harvard.iq.dataverse.DataverseSession;
 import edu.harvard.iq.dataverse.settings.FeatureFlags;
 
-import javax.inject.Inject;
-import javax.ws.rs.POST;
-import javax.ws.rs.Path;
-import javax.ws.rs.core.Response;
+import jakarta.inject.Inject;
+import jakarta.ws.rs.POST;
+import jakarta.ws.rs.Path;
+import jakarta.ws.rs.core.Response;
 
 @Path("logout")
 public class Logout extends AbstractApiBean {
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Mail.java b/src/main/java/edu/harvard/iq/dataverse/api/Mail.java
index 3b5050b480b..5fac2f30c89 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/Mail.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/Mail.java
@@ -2,10 +2,10 @@
 
 import edu.harvard.iq.dataverse.MailServiceBean;
 import edu.harvard.iq.dataverse.actionlogging.ActionLogRecord;
-import javax.ejb.EJB;
-import javax.ws.rs.GET;
-import javax.ws.rs.Path;
-import javax.ws.rs.core.Response;
+import jakarta.ejb.EJB;
+import jakarta.ws.rs.GET;
+import jakarta.ws.rs.Path;
+import jakarta.ws.rs.core.Response;
 
 /**
  *
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/MakeDataCountApi.java b/src/main/java/edu/harvard/iq/dataverse/api/MakeDataCountApi.java
index 0193c7774f1..6b48dbf8415 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/MakeDataCountApi.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/MakeDataCountApi.java
@@ -18,19 +18,19 @@
 import java.util.List;
 import java.util.logging.Level;
 import java.util.logging.Logger;
-import javax.ejb.EJB;
-import javax.json.Json;
-import javax.json.JsonArray;
-import javax.json.JsonArrayBuilder;
-import javax.json.JsonObject;
-import javax.json.JsonObjectBuilder;
-import javax.json.JsonValue;
-import javax.ws.rs.POST;
-import javax.ws.rs.Path;
-import javax.ws.rs.PathParam;
-import javax.ws.rs.QueryParam;
-import javax.ws.rs.core.Response;
-import javax.ws.rs.core.Response.Status;
+import jakarta.ejb.EJB;
+import jakarta.json.Json;
+import jakarta.json.JsonArray;
+import jakarta.json.JsonArrayBuilder;
+import jakarta.json.JsonObject;
+import jakarta.json.JsonObjectBuilder;
+import jakarta.json.JsonValue;
+import jakarta.ws.rs.POST;
+import jakarta.ws.rs.Path;
+import jakarta.ws.rs.PathParam;
+import jakarta.ws.rs.QueryParam;
+import jakarta.ws.rs.core.Response;
+import jakarta.ws.rs.core.Response.Status;
 
 /**
  * Note that there are makeDataCount endpoints in Datasets.java as well.
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Meta.java b/src/main/java/edu/harvard/iq/dataverse/api/Meta.java
index 1ca97f2ec69..a38840ba50d 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/Meta.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/Meta.java
@@ -15,19 +15,19 @@
 import edu.harvard.iq.dataverse.export.DDIExportServiceBean;
 
 import java.util.logging.Logger;
-import javax.ejb.EJB;
+import jakarta.ejb.EJB;
 import java.io.ByteArrayOutputStream;
 
-import javax.ws.rs.GET;
-import javax.ws.rs.Path;
-import javax.ws.rs.PathParam;
-import javax.ws.rs.QueryParam;
-import javax.ws.rs.Produces;
-import javax.ws.rs.core.Context;
-import javax.ws.rs.core.HttpHeaders;
-import javax.servlet.http.HttpServletResponse;
-import javax.ws.rs.NotFoundException;
-import javax.ws.rs.ServiceUnavailableException;
+import jakarta.ws.rs.GET;
+import jakarta.ws.rs.Path;
+import jakarta.ws.rs.PathParam;
+import jakarta.ws.rs.QueryParam;
+import jakarta.ws.rs.Produces;
+import jakarta.ws.rs.core.Context;
+import jakarta.ws.rs.core.HttpHeaders;
+import jakarta.servlet.http.HttpServletResponse;
+import jakarta.ws.rs.NotFoundException;
+import jakarta.ws.rs.ServiceUnavailableException;
 
 /*
     Custom API exceptions [NOT YET IMPLEMENTED]
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Metadata.java b/src/main/java/edu/harvard/iq/dataverse/api/Metadata.java
index b0d82b69d1b..bd937878286 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/Metadata.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/Metadata.java
@@ -8,22 +8,17 @@
 import edu.harvard.iq.dataverse.Dataset;
 import edu.harvard.iq.dataverse.DatasetServiceBean;
 
-import java.io.IOException;
-import java.util.concurrent.Future;
 import java.util.logging.Logger;
-import javax.ejb.EJB;
-import javax.json.Json;
-import javax.json.JsonArrayBuilder;
-import javax.json.JsonObjectBuilder;
-import javax.ws.rs.*;
-import javax.ws.rs.core.Response;
+import jakarta.ejb.EJB;
+import jakarta.json.Json;
+import jakarta.json.JsonArrayBuilder;
+import jakarta.json.JsonObjectBuilder;
+import jakarta.ws.rs.*;
 
-import javax.ws.rs.core.Response;
+import jakarta.ws.rs.core.Response;
 
-import edu.harvard.iq.dataverse.DatasetVersion;
 import edu.harvard.iq.dataverse.harvest.server.OAISetServiceBean;
 import edu.harvard.iq.dataverse.harvest.server.OAISet;
-import org.apache.solr.client.solrj.SolrServerException;
 
 /**
  *
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/MetadataBlocks.java b/src/main/java/edu/harvard/iq/dataverse/api/MetadataBlocks.java
index b3e1dad13af..448fb48e389 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/MetadataBlocks.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/MetadataBlocks.java
@@ -1,12 +1,12 @@
 package edu.harvard.iq.dataverse.api;
 
 import edu.harvard.iq.dataverse.MetadataBlock;
-import javax.ws.rs.GET;
-import javax.ws.rs.Path;
-import javax.ws.rs.Produces;
-import javax.ws.rs.core.Response;
+import jakarta.ws.rs.GET;
+import jakarta.ws.rs.Path;
+import jakarta.ws.rs.Produces;
+import jakarta.ws.rs.core.Response;
 import static edu.harvard.iq.dataverse.util.json.JsonPrinter.brief;
-import javax.ws.rs.PathParam;
+import jakarta.ws.rs.PathParam;
 import static edu.harvard.iq.dataverse.util.json.JsonPrinter.json;
 import static edu.harvard.iq.dataverse.util.json.JsonPrinter.toJsonArray;
 
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Metrics.java b/src/main/java/edu/harvard/iq/dataverse/api/Metrics.java
index e966fd200d5..7bb2570334b 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/Metrics.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/Metrics.java
@@ -12,22 +12,22 @@
 import java.util.List;
 import java.util.logging.Logger;
 
-import javax.json.JsonArray;
-import javax.json.JsonObject;
-import javax.ws.rs.GET;
-import javax.ws.rs.NotFoundException;
-import javax.ws.rs.Path;
-import javax.ws.rs.PathParam;
-import javax.ws.rs.Produces;
-import javax.ws.rs.QueryParam;
-import javax.ws.rs.core.Context;
-import javax.ws.rs.core.MediaType;
-import javax.ws.rs.core.Request;
-import javax.ws.rs.core.Response;
-
-import static javax.ws.rs.core.Response.Status.BAD_REQUEST;
-import javax.ws.rs.core.UriInfo;
-import javax.ws.rs.core.Variant;
+import jakarta.json.JsonArray;
+import jakarta.json.JsonObject;
+import jakarta.ws.rs.GET;
+import jakarta.ws.rs.NotFoundException;
+import jakarta.ws.rs.Path;
+import jakarta.ws.rs.PathParam;
+import jakarta.ws.rs.Produces;
+import jakarta.ws.rs.QueryParam;
+import jakarta.ws.rs.core.Context;
+import jakarta.ws.rs.core.MediaType;
+import jakarta.ws.rs.core.Request;
+import jakarta.ws.rs.core.Response;
+
+import static jakarta.ws.rs.core.Response.Status.BAD_REQUEST;
+import jakarta.ws.rs.core.UriInfo;
+import jakarta.ws.rs.core.Variant;
 
 /**
  * API endpoints for various metrics.
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Notifications.java b/src/main/java/edu/harvard/iq/dataverse/api/Notifications.java
index 006a95d85a5..37c894d3071 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/Notifications.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/Notifications.java
@@ -11,19 +11,19 @@
 import java.util.Optional;
 import java.util.Set;
 
-import javax.ejb.EJB;
-import javax.ejb.Stateless;
-import javax.json.Json;
-import javax.json.JsonArrayBuilder;
-import javax.json.JsonObjectBuilder;
-import javax.ws.rs.DELETE;
-import javax.ws.rs.GET;
-import javax.ws.rs.PUT;
-import javax.ws.rs.Path;
-import javax.ws.rs.PathParam;
-import javax.ws.rs.container.ContainerRequestContext;
-import javax.ws.rs.core.Context;
-import javax.ws.rs.core.Response;
+import jakarta.ejb.EJB;
+import jakarta.ejb.Stateless;
+import jakarta.json.Json;
+import jakarta.json.JsonArrayBuilder;
+import jakarta.json.JsonObjectBuilder;
+import jakarta.ws.rs.DELETE;
+import jakarta.ws.rs.GET;
+import jakarta.ws.rs.PUT;
+import jakarta.ws.rs.Path;
+import jakarta.ws.rs.PathParam;
+import jakarta.ws.rs.container.ContainerRequestContext;
+import jakarta.ws.rs.core.Context;
+import jakarta.ws.rs.core.Response;
 
 import edu.harvard.iq.dataverse.util.MailUtil;
 import edu.harvard.iq.dataverse.util.json.NullSafeJsonBuilder;
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Pids.java b/src/main/java/edu/harvard/iq/dataverse/api/Pids.java
index 2e6e97f7ced..534e42fd505 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/Pids.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/Pids.java
@@ -10,24 +10,24 @@
 import edu.harvard.iq.dataverse.settings.JvmSettings;
 import edu.harvard.iq.dataverse.util.BundleUtil;
 import java.util.Arrays;
-import javax.ejb.Stateless;
-import javax.json.Json;
-import javax.json.JsonArray;
-import javax.json.JsonArrayBuilder;
-import javax.json.JsonObjectBuilder;
-import javax.ws.rs.DELETE;
-import javax.ws.rs.GET;
-import javax.ws.rs.InternalServerErrorException;
-import javax.ws.rs.NotFoundException;
-import javax.ws.rs.POST;
-import javax.ws.rs.Path;
-import javax.ws.rs.PathParam;
-import javax.ws.rs.Produces;
-import javax.ws.rs.QueryParam;
-import javax.ws.rs.container.ContainerRequestContext;
-import javax.ws.rs.core.Context;
-import javax.ws.rs.core.MediaType;
-import javax.ws.rs.core.Response;
+import jakarta.ejb.Stateless;
+import jakarta.json.Json;
+import jakarta.json.JsonArray;
+import jakarta.json.JsonArrayBuilder;
+import jakarta.json.JsonObjectBuilder;
+import jakarta.ws.rs.DELETE;
+import jakarta.ws.rs.GET;
+import jakarta.ws.rs.InternalServerErrorException;
+import jakarta.ws.rs.NotFoundException;
+import jakarta.ws.rs.POST;
+import jakarta.ws.rs.Path;
+import jakarta.ws.rs.PathParam;
+import jakarta.ws.rs.Produces;
+import jakarta.ws.rs.QueryParam;
+import jakarta.ws.rs.container.ContainerRequestContext;
+import jakarta.ws.rs.core.Context;
+import jakarta.ws.rs.core.MediaType;
+import jakarta.ws.rs.core.Response;
 
 /**
  * PIDs are Persistent IDentifiers such as DOIs or Handles.
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Prov.java b/src/main/java/edu/harvard/iq/dataverse/api/Prov.java
index f0f18f781f1..37b4792920f 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/Prov.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/Prov.java
@@ -15,23 +15,23 @@
 import java.io.StringReader;
 import java.util.HashMap;
 import java.util.logging.Logger;
-import javax.inject.Inject;
-import javax.json.Json;
-import javax.json.JsonException;
-import javax.json.JsonObject;
-import javax.json.JsonObjectBuilder;
-import javax.ws.rs.Consumes;
-import javax.ws.rs.DELETE;
-import javax.ws.rs.GET;
-import javax.ws.rs.POST;
-import javax.ws.rs.Path;
-import javax.ws.rs.PathParam;
-import javax.ws.rs.QueryParam;
-import javax.ws.rs.container.ContainerRequestContext;
-import javax.ws.rs.core.Context;
-import javax.ws.rs.core.Response;
-import static javax.ws.rs.core.Response.Status.BAD_REQUEST;
-import static javax.ws.rs.core.Response.Status.FORBIDDEN;
+import jakarta.inject.Inject;
+import jakarta.json.Json;
+import jakarta.json.JsonException;
+import jakarta.json.JsonObject;
+import jakarta.json.JsonObjectBuilder;
+import jakarta.ws.rs.Consumes;
+import jakarta.ws.rs.DELETE;
+import jakarta.ws.rs.GET;
+import jakarta.ws.rs.POST;
+import jakarta.ws.rs.Path;
+import jakarta.ws.rs.PathParam;
+import jakarta.ws.rs.QueryParam;
+import jakarta.ws.rs.container.ContainerRequestContext;
+import jakarta.ws.rs.core.Context;
+import jakarta.ws.rs.core.Response;
+import static jakarta.ws.rs.core.Response.Status.BAD_REQUEST;
+import static jakarta.ws.rs.core.Response.Status.FORBIDDEN;
 
 @Path("files")
 public class Prov extends AbstractApiBean {
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Roles.java b/src/main/java/edu/harvard/iq/dataverse/api/Roles.java
index b7f9e4821e5..8812f95dea1 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/Roles.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/Roles.java
@@ -1,29 +1,26 @@
 package edu.harvard.iq.dataverse.api;
 
-import static edu.harvard.iq.dataverse.api.AbstractApiBean.error;
-
 import edu.harvard.iq.dataverse.api.auth.AuthRequired;
 import edu.harvard.iq.dataverse.api.dto.RoleDTO;
 import edu.harvard.iq.dataverse.authorization.DataverseRole;
 import edu.harvard.iq.dataverse.authorization.Permission;
-import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser;
 import edu.harvard.iq.dataverse.authorization.users.User;
-import javax.ws.rs.GET;
-import javax.ws.rs.POST;
-import javax.ws.rs.Path;
-import javax.ws.rs.PathParam;
+import jakarta.ws.rs.GET;
+import jakarta.ws.rs.POST;
+import jakarta.ws.rs.Path;
+import jakarta.ws.rs.PathParam;
 import static edu.harvard.iq.dataverse.util.json.JsonPrinter.*;
 import edu.harvard.iq.dataverse.engine.command.impl.CreateRoleCommand;
 import edu.harvard.iq.dataverse.engine.command.impl.DeleteRoleCommand;
 import edu.harvard.iq.dataverse.util.BundleUtil;
 import java.util.Arrays;
 import java.util.List;
-import javax.ejb.Stateless;
-import javax.ws.rs.DELETE;
-import javax.ws.rs.QueryParam;
-import javax.ws.rs.container.ContainerRequestContext;
-import javax.ws.rs.core.Context;
-import javax.ws.rs.core.Response;
+import jakarta.ejb.Stateless;
+import jakarta.ws.rs.DELETE;
+import jakarta.ws.rs.QueryParam;
+import jakarta.ws.rs.container.ContainerRequestContext;
+import jakarta.ws.rs.core.Context;
+import jakarta.ws.rs.core.Response;
 
 /**
  * Util API for managing roles. Might not make it to the production version.
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/SavedSearches.java b/src/main/java/edu/harvard/iq/dataverse/api/SavedSearches.java
index 7ead0d23711..5d0365d022e 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/SavedSearches.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/SavedSearches.java
@@ -11,23 +11,23 @@
 import java.util.ArrayList;
 import java.util.List;
 import java.util.logging.Logger;
-import javax.ejb.EJBException;
-import javax.json.Json;
-import javax.json.JsonArray;
-import javax.json.JsonArrayBuilder;
-import javax.json.JsonObject;
-import javax.json.JsonObjectBuilder;
-import javax.ws.rs.DELETE;
-import javax.ws.rs.GET;
-import javax.ws.rs.POST;
-import javax.ws.rs.PUT;
-import javax.ws.rs.Path;
-import javax.ws.rs.PathParam;
-import javax.ws.rs.QueryParam;
-import javax.ws.rs.core.Response;
-import static javax.ws.rs.core.Response.Status.BAD_REQUEST;
-import static javax.ws.rs.core.Response.Status.INTERNAL_SERVER_ERROR;
-import static javax.ws.rs.core.Response.Status.NOT_FOUND;
+import jakarta.ejb.EJBException;
+import jakarta.json.Json;
+import jakarta.json.JsonArray;
+import jakarta.json.JsonArrayBuilder;
+import jakarta.json.JsonObject;
+import jakarta.json.JsonObjectBuilder;
+import jakarta.ws.rs.DELETE;
+import jakarta.ws.rs.GET;
+import jakarta.ws.rs.POST;
+import jakarta.ws.rs.PUT;
+import jakarta.ws.rs.Path;
+import jakarta.ws.rs.PathParam;
+import jakarta.ws.rs.QueryParam;
+import jakarta.ws.rs.core.Response;
+import static jakarta.ws.rs.core.Response.Status.BAD_REQUEST;
+import static jakarta.ws.rs.core.Response.Status.INTERNAL_SERVER_ERROR;
+import static jakarta.ws.rs.core.Response.Status.NOT_FOUND;
 
 @Path("admin/savedsearches")
 public class SavedSearches extends AbstractApiBean {
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Search.java b/src/main/java/edu/harvard/iq/dataverse/api/Search.java
index d3c9d0a4cc6..c760534ca7b 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/Search.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/Search.java
@@ -24,19 +24,18 @@
 import java.util.Arrays;
 import java.util.List;
 import java.util.Map;
-import java.util.logging.Level;
 import java.util.logging.Logger;
-import javax.ejb.EJB;
-import javax.json.Json;
-import javax.json.JsonArrayBuilder;
-import javax.json.JsonObjectBuilder;
-import javax.servlet.http.HttpServletResponse;
-import javax.ws.rs.GET;
-import javax.ws.rs.Path;
-import javax.ws.rs.QueryParam;
-import javax.ws.rs.container.ContainerRequestContext;
-import javax.ws.rs.core.Context;
-import javax.ws.rs.core.Response;
+import jakarta.ejb.EJB;
+import jakarta.json.Json;
+import jakarta.json.JsonArrayBuilder;
+import jakarta.json.JsonObjectBuilder;
+import jakarta.servlet.http.HttpServletResponse;
+import jakarta.ws.rs.GET;
+import jakarta.ws.rs.Path;
+import jakarta.ws.rs.QueryParam;
+import jakarta.ws.rs.container.ContainerRequestContext;
+import jakarta.ws.rs.core.Context;
+import jakarta.ws.rs.core.Response;
 import org.apache.commons.lang3.StringUtils;
 
 /**
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/SiteMap.java b/src/main/java/edu/harvard/iq/dataverse/api/SiteMap.java
index 787c3380e5b..37d6a2aa3fe 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/SiteMap.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/SiteMap.java
@@ -2,13 +2,13 @@
 
 import edu.harvard.iq.dataverse.sitemap.SiteMapServiceBean;
 import edu.harvard.iq.dataverse.sitemap.SiteMapUtil;
-import javax.ejb.EJB;
-import javax.ejb.Stateless;
-import javax.ws.rs.POST;
-import javax.ws.rs.Path;
-import javax.ws.rs.Produces;
-import javax.ws.rs.core.MediaType;
-import javax.ws.rs.core.Response;
+import jakarta.ejb.EJB;
+import jakarta.ejb.Stateless;
+import jakarta.ws.rs.POST;
+import jakarta.ws.rs.Path;
+import jakarta.ws.rs.Produces;
+import jakarta.ws.rs.core.MediaType;
+import jakarta.ws.rs.core.Response;
 
 @Stateless
 @Path("admin/sitemap")
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/StorageSites.java b/src/main/java/edu/harvard/iq/dataverse/api/StorageSites.java
index 54adeecd9f9..2915328428e 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/StorageSites.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/StorageSites.java
@@ -3,16 +3,16 @@
 import edu.harvard.iq.dataverse.locality.StorageSite;
 import edu.harvard.iq.dataverse.locality.StorageSiteUtil;
 import java.util.List;
-import javax.json.Json;
-import javax.json.JsonArrayBuilder;
-import javax.json.JsonObject;
-import javax.ws.rs.DELETE;
-import javax.ws.rs.GET;
-import javax.ws.rs.POST;
-import javax.ws.rs.PUT;
-import javax.ws.rs.Path;
-import javax.ws.rs.PathParam;
-import javax.ws.rs.core.Response;
+import jakarta.json.Json;
+import jakarta.json.JsonArrayBuilder;
+import jakarta.json.JsonObject;
+import jakarta.ws.rs.DELETE;
+import jakarta.ws.rs.GET;
+import jakarta.ws.rs.POST;
+import jakarta.ws.rs.PUT;
+import jakarta.ws.rs.Path;
+import jakarta.ws.rs.PathParam;
+import jakarta.ws.rs.core.Response;
 
 @Path("admin/storageSites")
 public class StorageSites extends AbstractApiBean {
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/TestApi.java b/src/main/java/edu/harvard/iq/dataverse/api/TestApi.java
index 42caa95b9f5..87be1f14e05 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/TestApi.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/TestApi.java
@@ -2,20 +2,19 @@
 
 import edu.harvard.iq.dataverse.DataFile;
 import edu.harvard.iq.dataverse.Dataset;
-import static edu.harvard.iq.dataverse.api.AbstractApiBean.error;
 import edu.harvard.iq.dataverse.authorization.users.ApiToken;
 import edu.harvard.iq.dataverse.externaltools.ExternalTool;
 import edu.harvard.iq.dataverse.externaltools.ExternalToolHandler;
 import java.util.List;
-import javax.json.Json;
-import javax.json.JsonArrayBuilder;
-import javax.json.JsonObjectBuilder;
-import javax.ws.rs.GET;
-import javax.ws.rs.Path;
-import javax.ws.rs.PathParam;
-import javax.ws.rs.QueryParam;
-import javax.ws.rs.core.Response;
-import static javax.ws.rs.core.Response.Status.BAD_REQUEST;
+import jakarta.json.Json;
+import jakarta.json.JsonArrayBuilder;
+import jakarta.json.JsonObjectBuilder;
+import jakarta.ws.rs.GET;
+import jakarta.ws.rs.Path;
+import jakarta.ws.rs.PathParam;
+import jakarta.ws.rs.QueryParam;
+import jakarta.ws.rs.core.Response;
+import static jakarta.ws.rs.core.Response.Status.BAD_REQUEST;
 
 @Path("admin/test")
 public class TestApi extends AbstractApiBean {
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/TestIngest.java b/src/main/java/edu/harvard/iq/dataverse/api/TestIngest.java
index 15c3b34f6af..05ba150df8e 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/TestIngest.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/TestIngest.java
@@ -18,22 +18,22 @@
 import edu.harvard.iq.dataverse.util.StringUtil;
 import java.io.BufferedInputStream;
 import java.util.logging.Logger;
-import javax.ejb.EJB;
+import jakarta.ejb.EJB;
 import java.io.File;
 import java.io.FileInputStream;
 import java.io.FileNotFoundException;
 import java.io.IOException;
 import java.nio.file.Paths;
 import java.nio.file.StandardCopyOption;
-import javax.ws.rs.GET;
-import javax.ws.rs.Path;
-import javax.ws.rs.PathParam;
-import javax.ws.rs.Produces;
-import javax.ws.rs.core.Context;
-import javax.ws.rs.core.HttpHeaders;
-import javax.ws.rs.core.UriInfo;
-import javax.servlet.http.HttpServletResponse;
-import javax.ws.rs.QueryParam;
+import jakarta.ws.rs.GET;
+import jakarta.ws.rs.Path;
+import jakarta.ws.rs.PathParam;
+import jakarta.ws.rs.Produces;
+import jakarta.ws.rs.core.Context;
+import jakarta.ws.rs.core.HttpHeaders;
+import jakarta.ws.rs.core.UriInfo;
+import jakarta.servlet.http.HttpServletResponse;
+import jakarta.ws.rs.QueryParam;
 
 
 
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Users.java b/src/main/java/edu/harvard/iq/dataverse/api/Users.java
index 14bf25c91b2..791fc7aa774 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/Users.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/Users.java
@@ -5,8 +5,6 @@
  */
 package edu.harvard.iq.dataverse.api;
 
-import static edu.harvard.iq.dataverse.api.AbstractApiBean.error;
-
 import edu.harvard.iq.dataverse.api.auth.AuthRequired;
 import edu.harvard.iq.dataverse.authorization.users.ApiToken;
 import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser;
@@ -15,32 +13,30 @@
 import edu.harvard.iq.dataverse.engine.command.impl.GetUserTracesCommand;
 import edu.harvard.iq.dataverse.engine.command.impl.MergeInAccountCommand;
 import edu.harvard.iq.dataverse.engine.command.impl.RevokeAllRolesCommand;
-import edu.harvard.iq.dataverse.metrics.MetricsUtil;
 import edu.harvard.iq.dataverse.util.FileUtil;
 
 import static edu.harvard.iq.dataverse.util.json.JsonPrinter.json;
 
-import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.List;
 import java.util.logging.Level;
 import java.util.logging.Logger;
-import javax.ejb.Stateless;
-import javax.json.JsonArray;
-import javax.json.JsonObjectBuilder;
-import javax.ws.rs.BadRequestException;
-import javax.ws.rs.DELETE;
-import javax.ws.rs.GET;
-import javax.ws.rs.POST;
-import javax.ws.rs.Path;
-import javax.ws.rs.PathParam;
-import javax.ws.rs.Produces;
-import javax.ws.rs.container.ContainerRequestContext;
-import javax.ws.rs.core.Context;
-import javax.ws.rs.core.MediaType;
-import javax.ws.rs.core.Request;
-import javax.ws.rs.core.Response;
-import javax.ws.rs.core.Variant;
+import jakarta.ejb.Stateless;
+import jakarta.json.JsonArray;
+import jakarta.json.JsonObjectBuilder;
+import jakarta.ws.rs.BadRequestException;
+import jakarta.ws.rs.DELETE;
+import jakarta.ws.rs.GET;
+import jakarta.ws.rs.POST;
+import jakarta.ws.rs.Path;
+import jakarta.ws.rs.PathParam;
+import jakarta.ws.rs.Produces;
+import jakarta.ws.rs.container.ContainerRequestContext;
+import jakarta.ws.rs.core.Context;
+import jakarta.ws.rs.core.MediaType;
+import jakarta.ws.rs.core.Request;
+import jakarta.ws.rs.core.Response;
+import jakarta.ws.rs.core.Variant;
 
 /**
  *
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Util.java b/src/main/java/edu/harvard/iq/dataverse/api/Util.java
index 82adedc709f..25855769a38 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/Util.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/Util.java
@@ -7,9 +7,9 @@
 import java.util.TimeZone;
 import java.util.TreeSet;
 import java.util.stream.Collectors;
-import javax.json.Json;
-import javax.json.JsonArray;
-import javax.json.JsonReader;
+import jakarta.json.Json;
+import jakarta.json.JsonArray;
+import jakarta.json.JsonReader;
 
 public class Util {
 
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Workflows.java b/src/main/java/edu/harvard/iq/dataverse/api/Workflows.java
index 4269a0215bf..4eadcedf71a 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/Workflows.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/Workflows.java
@@ -8,11 +8,11 @@
 import java.util.Arrays;
 import java.util.logging.Level;
 import java.util.logging.Logger;
-import javax.ejb.EJB;
-import javax.ws.rs.POST;
-import javax.ws.rs.Path;
-import javax.ws.rs.PathParam;
-import javax.ws.rs.core.Response;
+import jakarta.ejb.EJB;
+import jakarta.ws.rs.POST;
+import jakarta.ws.rs.Path;
+import jakarta.ws.rs.PathParam;
+import jakarta.ws.rs.core.Response;
 
 /**
  * API Endpoint for external systems to report the results of workflow step
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/WorkflowsAdmin.java b/src/main/java/edu/harvard/iq/dataverse/api/WorkflowsAdmin.java
index 4babe6875e2..8d5024c1c14 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/WorkflowsAdmin.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/WorkflowsAdmin.java
@@ -11,18 +11,18 @@
 import edu.harvard.iq.dataverse.workflow.WorkflowServiceBean;
 import java.util.Arrays;
 import java.util.Optional;
-import javax.ejb.EJB;
-import javax.json.Json;
-import javax.json.JsonObject;
-import javax.json.JsonObjectBuilder;
-import javax.json.JsonValue;
-import javax.ws.rs.DELETE;
-import javax.ws.rs.GET;
-import javax.ws.rs.POST;
-import javax.ws.rs.PUT;
-import javax.ws.rs.Path;
-import javax.ws.rs.PathParam;
-import javax.ws.rs.core.Response;
+import jakarta.ejb.EJB;
+import jakarta.json.Json;
+import jakarta.json.JsonObject;
+import jakarta.json.JsonObjectBuilder;
+import jakarta.json.JsonValue;
+import jakarta.ws.rs.DELETE;
+import jakarta.ws.rs.GET;
+import jakarta.ws.rs.POST;
+import jakarta.ws.rs.PUT;
+import jakarta.ws.rs.Path;
+import jakarta.ws.rs.PathParam;
+import jakarta.ws.rs.core.Response;
 
 /**
  * API Endpoint for managing workflows.
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/auth/ApiKeyAuthMechanism.java b/src/main/java/edu/harvard/iq/dataverse/api/auth/ApiKeyAuthMechanism.java
index 60b75757f3c..0dd8a28baca 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/auth/ApiKeyAuthMechanism.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/auth/ApiKeyAuthMechanism.java
@@ -7,8 +7,8 @@
 import edu.harvard.iq.dataverse.authorization.users.User;
 import edu.harvard.iq.dataverse.privateurl.PrivateUrlServiceBean;
 
-import javax.inject.Inject;
-import javax.ws.rs.container.ContainerRequestContext;
+import jakarta.inject.Inject;
+import jakarta.ws.rs.container.ContainerRequestContext;
 import java.util.logging.Logger;
 
 /**
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/auth/AuthFilter.java b/src/main/java/edu/harvard/iq/dataverse/api/auth/AuthFilter.java
index dea9c571d22..34a72d718f0 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/auth/AuthFilter.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/auth/AuthFilter.java
@@ -3,12 +3,12 @@
 import edu.harvard.iq.dataverse.api.ApiConstants;
 import edu.harvard.iq.dataverse.authorization.users.User;
 
-import javax.annotation.Priority;
-import javax.inject.Inject;
-import javax.ws.rs.Priorities;
-import javax.ws.rs.container.ContainerRequestContext;
-import javax.ws.rs.container.ContainerRequestFilter;
-import javax.ws.rs.ext.Provider;
+import jakarta.annotation.Priority;
+import jakarta.inject.Inject;
+import jakarta.ws.rs.Priorities;
+import jakarta.ws.rs.container.ContainerRequestContext;
+import jakarta.ws.rs.container.ContainerRequestFilter;
+import jakarta.ws.rs.ext.Provider;
 import java.io.IOException;
 
 /**
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/auth/AuthMechanism.java b/src/main/java/edu/harvard/iq/dataverse/api/auth/AuthMechanism.java
index e9bf1f39361..bd34acbf702 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/auth/AuthMechanism.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/auth/AuthMechanism.java
@@ -2,7 +2,7 @@
 
 import edu.harvard.iq.dataverse.authorization.users.User;
 
-import javax.ws.rs.container.ContainerRequestContext;
+import jakarta.ws.rs.container.ContainerRequestContext;
 
 /**
  * @author Guillermo Portas
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/auth/AuthRequired.java b/src/main/java/edu/harvard/iq/dataverse/api/auth/AuthRequired.java
index 4deacc7f66e..bf0d785eeb3 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/auth/AuthRequired.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/auth/AuthRequired.java
@@ -1,6 +1,6 @@
 package edu.harvard.iq.dataverse.api.auth;
 
-import javax.ws.rs.NameBinding;
+import jakarta.ws.rs.NameBinding;
 import java.lang.annotation.ElementType;
 import java.lang.annotation.Retention;
 import java.lang.annotation.Target;
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/auth/BearerTokenAuthMechanism.java b/src/main/java/edu/harvard/iq/dataverse/api/auth/BearerTokenAuthMechanism.java
index c4b03728179..b5a48427fa5 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/auth/BearerTokenAuthMechanism.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/auth/BearerTokenAuthMechanism.java
@@ -11,9 +11,9 @@
 import edu.harvard.iq.dataverse.authorization.users.User;
 import edu.harvard.iq.dataverse.settings.FeatureFlags;
 
-import javax.inject.Inject;
-import javax.ws.rs.container.ContainerRequestContext;
-import javax.ws.rs.core.HttpHeaders;
+import jakarta.inject.Inject;
+import jakarta.ws.rs.container.ContainerRequestContext;
+import jakarta.ws.rs.core.HttpHeaders;
 import java.io.IOException;
 import java.util.List;
 import java.util.Optional;
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/auth/CompoundAuthMechanism.java b/src/main/java/edu/harvard/iq/dataverse/api/auth/CompoundAuthMechanism.java
index 59384ff0336..801e2752b9e 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/auth/CompoundAuthMechanism.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/auth/CompoundAuthMechanism.java
@@ -3,8 +3,8 @@
 import edu.harvard.iq.dataverse.authorization.users.GuestUser;
 import edu.harvard.iq.dataverse.authorization.users.User;
 
-import javax.inject.Inject;
-import javax.ws.rs.container.ContainerRequestContext;
+import jakarta.inject.Inject;
+import jakarta.ws.rs.container.ContainerRequestContext;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.List;
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/auth/SessionCookieAuthMechanism.java b/src/main/java/edu/harvard/iq/dataverse/api/auth/SessionCookieAuthMechanism.java
index 53aec9235a4..c1471c3f5b3 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/auth/SessionCookieAuthMechanism.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/auth/SessionCookieAuthMechanism.java
@@ -4,8 +4,8 @@
 import edu.harvard.iq.dataverse.authorization.users.User;
 import edu.harvard.iq.dataverse.settings.FeatureFlags;
 
-import javax.inject.Inject;
-import javax.ws.rs.container.ContainerRequestContext;
+import jakarta.inject.Inject;
+import jakarta.ws.rs.container.ContainerRequestContext;
 
 public class SessionCookieAuthMechanism implements AuthMechanism {
     @Inject
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/auth/SignedUrlAuthMechanism.java b/src/main/java/edu/harvard/iq/dataverse/api/auth/SignedUrlAuthMechanism.java
index caa58dfddf3..f8572144236 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/auth/SignedUrlAuthMechanism.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/auth/SignedUrlAuthMechanism.java
@@ -7,9 +7,9 @@
 import edu.harvard.iq.dataverse.settings.JvmSettings;
 import edu.harvard.iq.dataverse.util.UrlSignerUtil;
 
-import javax.inject.Inject;
-import javax.ws.rs.container.ContainerRequestContext;
-import javax.ws.rs.core.UriInfo;
+import jakarta.inject.Inject;
+import jakarta.ws.rs.container.ContainerRequestContext;
+import jakarta.ws.rs.core.UriInfo;
 
 import java.net.URLDecoder;
 import java.nio.charset.StandardCharsets;
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/auth/WorkflowKeyAuthMechanism.java b/src/main/java/edu/harvard/iq/dataverse/api/auth/WorkflowKeyAuthMechanism.java
index e673e14e677..bbd67713e85 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/auth/WorkflowKeyAuthMechanism.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/auth/WorkflowKeyAuthMechanism.java
@@ -4,8 +4,8 @@
 import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser;
 import edu.harvard.iq.dataverse.authorization.users.User;
 
-import javax.inject.Inject;
-import javax.ws.rs.container.ContainerRequestContext;
+import jakarta.inject.Inject;
+import jakarta.ws.rs.container.ContainerRequestContext;
 
 /**
  * @author Guillermo Portas
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/auth/WrappedAuthErrorResponse.java b/src/main/java/edu/harvard/iq/dataverse/api/auth/WrappedAuthErrorResponse.java
index 18eafde6ede..40431557261 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/auth/WrappedAuthErrorResponse.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/auth/WrappedAuthErrorResponse.java
@@ -3,8 +3,8 @@
 import edu.harvard.iq.dataverse.api.ApiConstants;
 import edu.harvard.iq.dataverse.util.json.NullSafeJsonBuilder;
 
-import javax.ws.rs.core.MediaType;
-import javax.ws.rs.core.Response;
+import jakarta.ws.rs.core.MediaType;
+import jakarta.ws.rs.core.Response;
 
 public class WrappedAuthErrorResponse extends Exception {
 
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/batchjob/BatchJobResource.java b/src/main/java/edu/harvard/iq/dataverse/api/batchjob/BatchJobResource.java
index 37c29f20efe..09a60b1b700 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/batchjob/BatchJobResource.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/batchjob/BatchJobResource.java
@@ -4,17 +4,17 @@
 import edu.harvard.iq.dataverse.api.AbstractApiBean;
 import edu.harvard.iq.dataverse.batch.entities.JobExecutionEntity;
 
-import javax.batch.operations.JobOperator;
-import javax.batch.runtime.BatchRuntime;
-import javax.batch.runtime.JobExecution;
-import javax.batch.runtime.JobInstance;
-import javax.ejb.Stateless;
-import javax.ws.rs.GET;
-import javax.ws.rs.Path;
-import javax.ws.rs.PathParam;
-import javax.ws.rs.Produces;
-import javax.ws.rs.core.MediaType;
-import javax.ws.rs.core.Response;
+import jakarta.batch.operations.JobOperator;
+import jakarta.batch.runtime.BatchRuntime;
+import jakarta.batch.runtime.JobExecution;
+import jakarta.batch.runtime.JobInstance;
+import jakarta.ejb.Stateless;
+import jakarta.ws.rs.GET;
+import jakarta.ws.rs.Path;
+import jakarta.ws.rs.PathParam;
+import jakarta.ws.rs.Produces;
+import jakarta.ws.rs.core.MediaType;
+import jakarta.ws.rs.core.Response;
 import java.util.ArrayList;
 import java.util.List;
 import java.util.Set;
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/batchjob/FileRecordJobResource.java b/src/main/java/edu/harvard/iq/dataverse/api/batchjob/FileRecordJobResource.java
index 8695c7dcab7..b7a6b7cfafd 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/batchjob/FileRecordJobResource.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/batchjob/FileRecordJobResource.java
@@ -6,21 +6,21 @@
 import edu.harvard.iq.dataverse.api.auth.AuthRequired;
 import edu.harvard.iq.dataverse.batch.jobs.importer.ImportMode;
 import edu.harvard.iq.dataverse.engine.command.impl.ImportFromFileSystemCommand;
-import javax.ejb.EJB;
-import javax.ejb.Stateless;
-import javax.ws.rs.DefaultValue;
-import javax.ws.rs.POST;
-import javax.ws.rs.Path;
-import javax.ws.rs.PathParam;
-import javax.ws.rs.Produces;
-import javax.ws.rs.QueryParam;
-import javax.ws.rs.container.ContainerRequestContext;
-import javax.ws.rs.core.Context;
-import javax.ws.rs.core.MediaType;
-import javax.ws.rs.core.Response;
+import jakarta.ejb.EJB;
+import jakarta.ejb.Stateless;
+import jakarta.ws.rs.DefaultValue;
+import jakarta.ws.rs.POST;
+import jakarta.ws.rs.Path;
+import jakarta.ws.rs.PathParam;
+import jakarta.ws.rs.Produces;
+import jakarta.ws.rs.QueryParam;
+import jakarta.ws.rs.container.ContainerRequestContext;
+import jakarta.ws.rs.core.Context;
+import jakarta.ws.rs.core.MediaType;
+import jakarta.ws.rs.core.Response;
 import java.util.logging.Logger;
-import javax.json.Json;
-import javax.json.JsonObject;
+import jakarta.json.Json;
+import jakarta.json.JsonObject;
 
 @Stateless
 @Path("batch/jobs")
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/CollectionDepositManagerImpl.java b/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/CollectionDepositManagerImpl.java
index 6543d771ebe..5bc50903be8 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/CollectionDepositManagerImpl.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/CollectionDepositManagerImpl.java
@@ -18,12 +18,12 @@
 import edu.harvard.iq.dataverse.util.ConstraintViolationUtil;
 import java.util.logging.Level;
 import java.util.logging.Logger;
-import javax.ejb.EJB;
-import javax.ejb.EJBException;
-import javax.inject.Inject;
-import javax.servlet.http.HttpServletRequest;
-import javax.validation.ConstraintViolation;
-import javax.validation.ConstraintViolationException;
+import jakarta.ejb.EJB;
+import jakarta.ejb.EJBException;
+import jakarta.inject.Inject;
+import jakarta.servlet.http.HttpServletRequest;
+import jakarta.validation.ConstraintViolation;
+import jakarta.validation.ConstraintViolationException;
 import org.apache.abdera.parser.ParseException;
 import org.swordapp.server.AuthCredentials;
 import org.swordapp.server.CollectionDepositManager;
@@ -174,7 +174,9 @@ public DepositReceipt createNew(String collectionUri, Deposit deposit, AuthCrede
                     // curl --insecure --data-binary "@multipart.dat" -H 'Content-Type: multipart/related; boundary="===============0670350989=="' -H "MIME-Version: 1.0" https://sword:sword@localhost:8181/dvn/api/data-deposit/v1/swordv2/collection/dataverse/sword/hdl:1902.1/12345
                     // but...
                     // "Yeah, multipart is critically broken across all implementations" -- http://www.mail-archive.com/sword-app-tech@lists.sourceforge.net/msg00327.html
-                    throw new UnsupportedOperationException("Not yet implemented");
+                    //
+                    // OB 2022-03-24 -> sword2-server v2.0 library drops support for multipart/related.
+                    throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "Multipart/related RFC2387 type posts are not supported. Please POST an Atom entry instead.");
                 } else {
                     throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "expected deposit types are isEntryOnly, isBinaryOnly, and isMultiPart");
                 }
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/CollectionListManagerImpl.java b/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/CollectionListManagerImpl.java
index c7ed00a23d0..084136f2b5d 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/CollectionListManagerImpl.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/CollectionListManagerImpl.java
@@ -11,9 +11,9 @@
 import edu.harvard.iq.dataverse.engine.command.impl.UpdateDatasetVersionCommand;
 import java.util.List;
 import java.util.logging.Logger;
-import javax.ejb.EJB;
-import javax.inject.Inject;
-import javax.servlet.http.HttpServletRequest;
+import jakarta.ejb.EJB;
+import jakarta.inject.Inject;
+import jakarta.servlet.http.HttpServletRequest;
 import javax.xml.namespace.QName;
 import org.apache.abdera.Abdera;
 import org.apache.abdera.i18n.iri.IRI;
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/ContainerManagerImpl.java b/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/ContainerManagerImpl.java
index b605f3717a8..4d4d1d08b51 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/ContainerManagerImpl.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/ContainerManagerImpl.java
@@ -25,12 +25,12 @@
 import java.util.List;
 import java.util.Map;
 import java.util.logging.Logger;
-import javax.ejb.EJB;
-import javax.ejb.EJBException;
-import javax.inject.Inject;
-import javax.persistence.EntityManager;
-import javax.persistence.PersistenceContext;
-import javax.servlet.http.HttpServletRequest;
+import jakarta.ejb.EJB;
+import jakarta.ejb.EJBException;
+import jakarta.inject.Inject;
+import jakarta.persistence.EntityManager;
+import jakarta.persistence.PersistenceContext;
+import jakarta.servlet.http.HttpServletRequest;
 import org.apache.abdera.parser.ParseException;
 import org.swordapp.server.AuthCredentials;
 import org.swordapp.server.ContainerManager;
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/MediaResourceManagerImpl.java b/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/MediaResourceManagerImpl.java
index 482e35df781..93b7dc96563 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/MediaResourceManagerImpl.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/MediaResourceManagerImpl.java
@@ -9,9 +9,6 @@
 import edu.harvard.iq.dataverse.EjbDataverseEngine;
 import edu.harvard.iq.dataverse.PermissionServiceBean;
 import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser;
-import edu.harvard.iq.dataverse.dataaccess.StorageIO;
-import edu.harvard.iq.dataverse.datacapturemodule.DataCaptureModuleUtil;
-import edu.harvard.iq.dataverse.datasetutility.FileExceedsMaxSizeException;
 import edu.harvard.iq.dataverse.engine.command.DataverseRequest;
 import edu.harvard.iq.dataverse.engine.command.exception.CommandException;
 import edu.harvard.iq.dataverse.engine.command.impl.UpdateDatasetVersionCommand;
@@ -29,12 +26,12 @@
 import java.util.Map;
 import java.util.Set;
 import java.util.logging.Logger;
-import javax.ejb.EJB;
-import javax.ejb.EJBException;
-import javax.inject.Inject;
-import javax.servlet.http.HttpServletRequest;
-import javax.validation.ConstraintViolation;
-import javax.validation.ConstraintViolationException;
+import jakarta.ejb.EJB;
+import jakarta.ejb.EJBException;
+import jakarta.inject.Inject;
+import jakarta.servlet.http.HttpServletRequest;
+import jakarta.validation.ConstraintViolation;
+import jakarta.validation.ConstraintViolationException;
 
 import edu.harvard.iq.dataverse.util.file.CreateDataFileResult;
 import org.swordapp.server.AuthCredentials;
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/SWORDv2CollectionServlet.java b/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/SWORDv2CollectionServlet.java
index a761afd1324..c509a8d6f52 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/SWORDv2CollectionServlet.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/SWORDv2CollectionServlet.java
@@ -2,10 +2,10 @@
 
 import java.io.IOException;
 import java.util.concurrent.locks.ReentrantLock;
-import javax.inject.Inject;
-import javax.servlet.ServletException;
-import javax.servlet.http.HttpServletRequest;
-import javax.servlet.http.HttpServletResponse;
+import jakarta.inject.Inject;
+import jakarta.servlet.ServletException;
+import jakarta.servlet.http.HttpServletRequest;
+import jakarta.servlet.http.HttpServletResponse;
 import org.swordapp.server.CollectionAPI;
 import org.swordapp.server.servlets.SwordServlet;
 
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/SWORDv2ContainerServlet.java b/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/SWORDv2ContainerServlet.java
index 441186cc63f..53dce24c0fe 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/SWORDv2ContainerServlet.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/SWORDv2ContainerServlet.java
@@ -2,10 +2,10 @@
 
 import java.io.IOException;
 import java.util.concurrent.locks.ReentrantLock;
-import javax.inject.Inject;
-import javax.servlet.ServletException;
-import javax.servlet.http.HttpServletRequest;
-import javax.servlet.http.HttpServletResponse;
+import jakarta.inject.Inject;
+import jakarta.servlet.ServletException;
+import jakarta.servlet.http.HttpServletRequest;
+import jakarta.servlet.http.HttpServletResponse;
 import org.swordapp.server.ContainerAPI;
 import org.swordapp.server.ContainerManager;
 import org.swordapp.server.StatementManager;
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/SWORDv2MediaResourceServlet.java b/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/SWORDv2MediaResourceServlet.java
index c455a6fd26a..245ab6ab23b 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/SWORDv2MediaResourceServlet.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/SWORDv2MediaResourceServlet.java
@@ -2,10 +2,10 @@
 
 import java.io.IOException;
 import java.util.concurrent.locks.ReentrantLock;
-import javax.inject.Inject;
-import javax.servlet.ServletException;
-import javax.servlet.http.HttpServletRequest;
-import javax.servlet.http.HttpServletResponse;
+import jakarta.inject.Inject;
+import jakarta.servlet.ServletException;
+import jakarta.servlet.http.HttpServletRequest;
+import jakarta.servlet.http.HttpServletResponse;
 import org.swordapp.server.MediaResourceAPI;
 import org.swordapp.server.servlets.SwordServlet;
 
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/SWORDv2ServiceDocumentServlet.java b/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/SWORDv2ServiceDocumentServlet.java
index 37db76d3c9c..eab005d87fa 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/SWORDv2ServiceDocumentServlet.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/SWORDv2ServiceDocumentServlet.java
@@ -1,10 +1,10 @@
 package edu.harvard.iq.dataverse.api.datadeposit;
 
 import java.io.IOException;
-import javax.inject.Inject;
-import javax.servlet.ServletException;
-import javax.servlet.http.HttpServletRequest;
-import javax.servlet.http.HttpServletResponse;
+import jakarta.inject.Inject;
+import jakarta.servlet.ServletException;
+import jakarta.servlet.http.HttpServletRequest;
+import jakarta.servlet.http.HttpServletResponse;
 import org.swordapp.server.ServiceDocumentAPI;
 import org.swordapp.server.servlets.SwordServlet;
 
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/SWORDv2StatementServlet.java b/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/SWORDv2StatementServlet.java
index ed1202d8c77..4bcc9c6afe8 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/SWORDv2StatementServlet.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/SWORDv2StatementServlet.java
@@ -1,10 +1,10 @@
 package edu.harvard.iq.dataverse.api.datadeposit;
 
 import java.io.IOException;
-import javax.inject.Inject;
-import javax.servlet.ServletException;
-import javax.servlet.http.HttpServletRequest;
-import javax.servlet.http.HttpServletResponse;
+import jakarta.inject.Inject;
+import jakarta.servlet.ServletException;
+import jakarta.servlet.http.HttpServletRequest;
+import jakarta.servlet.http.HttpServletResponse;
 import org.swordapp.server.StatementAPI;
 import org.swordapp.server.StatementManager;
 import org.swordapp.server.servlets.SwordServlet;
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/ServiceDocumentManagerImpl.java b/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/ServiceDocumentManagerImpl.java
index 049b20f605b..134d54aef88 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/ServiceDocumentManagerImpl.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/ServiceDocumentManagerImpl.java
@@ -8,8 +8,8 @@
 import edu.harvard.iq.dataverse.util.SystemConfig;
 import java.util.List;
 import java.util.logging.Logger;
-import javax.ejb.EJB;
-import javax.inject.Inject;
+import jakarta.ejb.EJB;
+import jakarta.inject.Inject;
 
 import org.apache.commons.lang3.StringUtils;
 import org.swordapp.server.AuthCredentials;
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/StatementManagerImpl.java b/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/StatementManagerImpl.java
index 05864fb2da0..95763e0eafb 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/StatementManagerImpl.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/StatementManagerImpl.java
@@ -17,9 +17,9 @@
 import java.util.Optional;
 import java.util.logging.Logger;
 import static java.util.stream.Collectors.joining;
-import javax.ejb.EJB;
-import javax.inject.Inject;
-import javax.servlet.http.HttpServletRequest;
+import jakarta.ejb.EJB;
+import jakarta.inject.Inject;
+import jakarta.servlet.http.HttpServletRequest;
 import org.apache.abdera.i18n.iri.IRI;
 import org.apache.abdera.i18n.iri.IRISyntaxException;
 import org.apache.abdera.model.AtomDate;
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/SwordConfigurationImpl.java b/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/SwordConfigurationImpl.java
index 1e506c6a0b1..a5564e9fbdb 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/SwordConfigurationImpl.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/SwordConfigurationImpl.java
@@ -6,7 +6,7 @@
 import java.util.Arrays;
 import java.util.List;
 import java.util.logging.Logger;
-import javax.ejb.EJB;
+import jakarta.ejb.EJB;
 import org.swordapp.server.SwordConfiguration;
 
 public class SwordConfigurationImpl implements SwordConfiguration {
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/SwordFilter.java b/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/SwordFilter.java
new file mode 100644
index 00000000000..aa7e028a4ba
--- /dev/null
+++ b/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/SwordFilter.java
@@ -0,0 +1,49 @@
+package edu.harvard.iq.dataverse.api.datadeposit;
+
+import jakarta.servlet.Filter;
+import jakarta.servlet.FilterChain;
+import jakarta.servlet.ServletException;
+import jakarta.servlet.ServletRequest;
+import jakarta.servlet.ServletResponse;
+import jakarta.servlet.http.HttpServletRequest;
+import jakarta.servlet.http.HttpServletRequestWrapper;
+import java.io.IOException;
+
+public class SwordFilter implements Filter {
+
+    @Override
+    public void doFilter(ServletRequest request, ServletResponse response, FilterChain chain) throws IOException, ServletException {
+        HttpServletRequest req = (HttpServletRequest) request;
+        MutateHeaders requestWrapper = new MutateHeaders(req);
+        chain.doFilter(requestWrapper, response);
+    }
+
+    /**
+     * We are mutating headers because Payara 6 is more strict than Payara 5 and
+     * wants "attachment; filename=" instead of just "filename=". In order to
+     * not break backward compatibility, we add "attachment; " for our (SWORD)
+     * API users. (This only seems to affect our SWORD API.) That is, they can
+     * continue to send '-H "Content-Disposition: filename=example.zip"' as
+     * we've documented for years.
+     */
+    public class MutateHeaders extends HttpServletRequestWrapper {
+
+        public MutateHeaders(HttpServletRequest request) {
+            super(request);
+        }
+
+        // inspired by https://stackoverflow.com/questions/2811769/adding-an-http-header-to-the-request-in-a-servlet-filter/2811841#2811841
+        @Override
+        public String getHeader(String name) {
+            String header = super.getHeader(name);
+            if ("Content-Disposition".equalsIgnoreCase(name)) {
+                if (header.startsWith("filename=")) {
+                    header = header.replaceFirst("filename=", "attachment; filename=");
+                }
+            }
+            return (header != null) ? header : super.getParameter(name);
+        }
+
+    }
+
+}
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/SwordServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/SwordServiceBean.java
index 2e093dbcf36..22b6ee05e48 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/SwordServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/SwordServiceBean.java
@@ -19,10 +19,10 @@
 import java.util.List;
 import java.util.Map;
 import java.util.logging.Logger;
-import javax.ejb.EJB;
-import javax.ejb.Stateless;
-import javax.inject.Inject;
-import javax.inject.Named;
+import jakarta.ejb.EJB;
+import jakarta.ejb.Stateless;
+import jakarta.inject.Inject;
+import jakarta.inject.Named;
 
 import org.apache.commons.lang3.StringUtils;
 import org.swordapp.server.SwordEntry;
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/errorhandlers/ConstraintViolationExceptionHandler.java b/src/main/java/edu/harvard/iq/dataverse/api/errorhandlers/ConstraintViolationExceptionHandler.java
index 4cbf31d1d2c..bb57059a99a 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/errorhandlers/ConstraintViolationExceptionHandler.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/errorhandlers/ConstraintViolationExceptionHandler.java
@@ -1,17 +1,15 @@
 package edu.harvard.iq.dataverse.api.errorhandlers;
 
-import edu.harvard.iq.dataverse.util.json.JsonPrinter;
-
-import javax.json.Json;
-import javax.json.JsonArray;
-import javax.json.JsonArrayBuilder;
-import javax.json.JsonObject;
-import javax.validation.ConstraintViolation;
-import javax.validation.ConstraintViolationException;
-import javax.ws.rs.core.MediaType;
-import javax.ws.rs.core.Response;
-import javax.ws.rs.ext.ExceptionMapper;
-import javax.ws.rs.ext.Provider;
+import jakarta.json.Json;
+import jakarta.json.JsonArray;
+import jakarta.json.JsonArrayBuilder;
+import jakarta.json.JsonObject;
+import jakarta.validation.ConstraintViolation;
+import jakarta.validation.ConstraintViolationException;
+import jakarta.ws.rs.core.MediaType;
+import jakarta.ws.rs.core.Response;
+import jakarta.ws.rs.ext.ExceptionMapper;
+import jakarta.ws.rs.ext.Provider;
 import java.util.List;
 import java.util.stream.Collectors;
 
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/errorhandlers/JsonParseExceptionHandler.java b/src/main/java/edu/harvard/iq/dataverse/api/errorhandlers/JsonParseExceptionHandler.java
index 286272d9de3..2f974a1c5be 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/errorhandlers/JsonParseExceptionHandler.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/errorhandlers/JsonParseExceptionHandler.java
@@ -2,17 +2,14 @@
 
 import edu.harvard.iq.dataverse.util.json.JsonParseException;
 
-import javax.json.Json;
-import javax.servlet.http.HttpServletRequest;
-import javax.ws.rs.BadRequestException;
-import javax.ws.rs.core.Context;
-import javax.ws.rs.core.MediaType;
-import javax.ws.rs.core.Response;
-import javax.ws.rs.ext.ExceptionMapper;
-import javax.ws.rs.ext.Provider;
-import java.util.UUID;
-import java.util.logging.Level;
-import java.util.logging.Logger;
+import jakarta.json.Json;
+import jakarta.servlet.http.HttpServletRequest;
+import jakarta.ws.rs.BadRequestException;
+import jakarta.ws.rs.core.Context;
+import jakarta.ws.rs.core.MediaType;
+import jakarta.ws.rs.core.Response;
+import jakarta.ws.rs.ext.ExceptionMapper;
+import jakarta.ws.rs.ext.Provider;
 
 /**
  * Make a failing JSON parsing request appear to be a BadRequest (error code 400)
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/errorhandlers/ThrowableHandler.java b/src/main/java/edu/harvard/iq/dataverse/api/errorhandlers/ThrowableHandler.java
index 4064ee21474..8e43a1876bf 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/errorhandlers/ThrowableHandler.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/errorhandlers/ThrowableHandler.java
@@ -2,11 +2,11 @@
 
 import edu.harvard.iq.dataverse.api.util.JsonResponseBuilder;
 
-import javax.servlet.http.HttpServletRequest;
-import javax.ws.rs.core.Context;
-import javax.ws.rs.core.Response;
-import javax.ws.rs.ext.ExceptionMapper;
-import javax.ws.rs.ext.Provider;
+import jakarta.servlet.http.HttpServletRequest;
+import jakarta.ws.rs.core.Context;
+import jakarta.ws.rs.core.Response;
+import jakarta.ws.rs.ext.ExceptionMapper;
+import jakarta.ws.rs.ext.Provider;
 import java.util.Optional;
 import java.util.logging.Level;
 import java.util.logging.Logger;
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/errorhandlers/WebApplicationExceptionHandler.java b/src/main/java/edu/harvard/iq/dataverse/api/errorhandlers/WebApplicationExceptionHandler.java
index 5f28bfd0afc..e67e91e63c9 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/errorhandlers/WebApplicationExceptionHandler.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/errorhandlers/WebApplicationExceptionHandler.java
@@ -8,12 +8,12 @@
 import edu.harvard.iq.dataverse.api.util.JsonResponseBuilder;
 import edu.harvard.iq.dataverse.util.BundleUtil;
 
-import javax.servlet.http.HttpServletRequest;
-import javax.ws.rs.WebApplicationException;
-import javax.ws.rs.core.Context;
-import javax.ws.rs.core.Response;
-import javax.ws.rs.ext.ExceptionMapper;
-import javax.ws.rs.ext.Provider;
+import jakarta.servlet.http.HttpServletRequest;
+import jakarta.ws.rs.WebApplicationException;
+import jakarta.ws.rs.core.Context;
+import jakarta.ws.rs.core.Response;
+import jakarta.ws.rs.ext.ExceptionMapper;
+import jakarta.ws.rs.ext.Provider;
 import java.util.Optional;
 import java.util.logging.Level;
 import java.util.logging.Logger;
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/imports/CustomFieldMap.java b/src/main/java/edu/harvard/iq/dataverse/api/imports/CustomFieldMap.java
index fc96215cef0..2bea36a6047 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/imports/CustomFieldMap.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/imports/CustomFieldMap.java
@@ -7,14 +7,14 @@
 package edu.harvard.iq.dataverse.api.imports;
 
 import java.io.Serializable;
-import javax.persistence.Entity;
-import javax.persistence.GeneratedValue;
-import javax.persistence.GenerationType;
-import javax.persistence.Id;
-import javax.persistence.Index;
-import javax.persistence.NamedQueries;
-import javax.persistence.NamedQuery;
-import javax.persistence.Table;
+import jakarta.persistence.Entity;
+import jakarta.persistence.GeneratedValue;
+import jakarta.persistence.GenerationType;
+import jakarta.persistence.Id;
+import jakarta.persistence.Index;
+import jakarta.persistence.NamedQueries;
+import jakarta.persistence.NamedQuery;
+import jakarta.persistence.Table;
 
 /**
  *
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/imports/CustomFieldServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/api/imports/CustomFieldServiceBean.java
index e7b8e71495b..240baeefcff 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/imports/CustomFieldServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/imports/CustomFieldServiceBean.java
@@ -5,9 +5,9 @@
  */
 package edu.harvard.iq.dataverse.api.imports;
 
-import javax.ejb.Stateless;
-import javax.persistence.EntityManager;
-import javax.persistence.PersistenceContext;
+import jakarta.ejb.Stateless;
+import jakarta.persistence.EntityManager;
+import jakarta.persistence.PersistenceContext;
 
 /**
  *
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/imports/ImportDDIServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/api/imports/ImportDDIServiceBean.java
index 39b9ddbcf5f..8f7934dd528 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/imports/ImportDDIServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/imports/ImportDDIServiceBean.java
@@ -22,15 +22,14 @@
 import java.util.Map;
 import java.util.logging.Level;
 import java.util.logging.Logger;
-import javax.ejb.EJB;
-import javax.ejb.EJBException;
-import javax.ejb.Stateless;
+import jakarta.ejb.EJB;
+import jakarta.ejb.EJBException;
+import jakarta.ejb.Stateless;
 import javax.xml.stream.XMLStreamConstants;
 import javax.xml.stream.XMLStreamException;
 import javax.xml.stream.XMLStreamReader;
 import javax.xml.stream.XMLInputFactory;
 
-import edu.harvard.iq.dataverse.util.json.ControlledVocabularyException;
 import org.apache.commons.lang3.StringUtils;
 
 /**
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/imports/ImportGenericServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/api/imports/ImportGenericServiceBean.java
index 57d7714ba77..f7a6cf54dd5 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/imports/ImportGenericServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/imports/ImportGenericServiceBean.java
@@ -11,7 +11,6 @@
 import edu.harvard.iq.dataverse.DatasetVersion;
 import edu.harvard.iq.dataverse.ForeignMetadataFieldMapping;
 import edu.harvard.iq.dataverse.ForeignMetadataFormatMapping;
-import edu.harvard.iq.dataverse.GlobalId;
 import edu.harvard.iq.dataverse.HandlenetServiceBean;
 import edu.harvard.iq.dataverse.MetadataBlockServiceBean;
 import edu.harvard.iq.dataverse.api.dto.*;  
@@ -32,19 +31,19 @@
 import java.util.Map;
 import java.util.logging.Level;
 import java.util.logging.Logger;
-import javax.ejb.EJB;
-import javax.ejb.EJBException;
-import javax.ejb.Stateless;
-import javax.inject.Named;
-import javax.json.Json;
+import jakarta.ejb.EJB;
+import jakarta.ejb.EJBException;
+import jakarta.ejb.Stateless;
+import jakarta.inject.Named;
+import jakarta.json.Json;
 import javax.xml.stream.XMLStreamConstants;
 import javax.xml.stream.XMLStreamException;
 import javax.xml.stream.XMLStreamReader;
-import javax.json.JsonObject;
-import javax.json.JsonReader;
-import javax.persistence.EntityManager;
-import javax.persistence.NoResultException;
-import javax.persistence.PersistenceContext;
+import jakarta.json.JsonObject;
+import jakarta.json.JsonReader;
+import jakarta.persistence.EntityManager;
+import jakarta.persistence.NoResultException;
+import jakarta.persistence.PersistenceContext;
 import javax.xml.stream.XMLInputFactory;
 import net.handle.hdllib.HandleException;
 import net.handle.hdllib.HandleResolver;
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/imports/ImportServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/api/imports/ImportServiceBean.java
index cb6cef6ded5..bcb67b180c8 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/imports/ImportServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/imports/ImportServiceBean.java
@@ -51,23 +51,23 @@
 import java.util.logging.Level;
 import java.util.logging.LogRecord;
 import java.util.logging.Logger;
-import javax.ejb.EJB;
-import javax.ejb.EJBException;
-import javax.ejb.Stateless;
-import javax.ejb.TransactionAttribute;
-import javax.ejb.TransactionAttributeType;
-import static javax.ejb.TransactionAttributeType.REQUIRES_NEW;
-import javax.json.Json;
-import javax.json.JsonObject;
-import javax.json.JsonObjectBuilder;
-import javax.json.JsonReader;
-import javax.persistence.EntityManager;
-import javax.persistence.PersistenceContext;
-import javax.validation.ConstraintViolation;
-import javax.validation.ConstraintViolationException;
-import javax.validation.Validation;
-import javax.validation.Validator;
-import javax.validation.ValidatorFactory;
+import jakarta.ejb.EJB;
+import jakarta.ejb.EJBException;
+import jakarta.ejb.Stateless;
+import jakarta.ejb.TransactionAttribute;
+import jakarta.ejb.TransactionAttributeType;
+import static jakarta.ejb.TransactionAttributeType.REQUIRES_NEW;
+import jakarta.json.Json;
+import jakarta.json.JsonObject;
+import jakarta.json.JsonObjectBuilder;
+import jakarta.json.JsonReader;
+import jakarta.persistence.EntityManager;
+import jakarta.persistence.PersistenceContext;
+import jakarta.validation.ConstraintViolation;
+import jakarta.validation.ConstraintViolationException;
+import jakarta.validation.Validation;
+import jakarta.validation.Validator;
+import jakarta.validation.ValidatorFactory;
 import javax.xml.stream.XMLStreamException;
 import org.apache.commons.lang3.StringUtils;
 
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/util/JsonResponseBuilder.java b/src/main/java/edu/harvard/iq/dataverse/api/util/JsonResponseBuilder.java
index aef17d1ab34..71a010b7e6d 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/util/JsonResponseBuilder.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/util/JsonResponseBuilder.java
@@ -2,14 +2,14 @@
 
 import edu.harvard.iq.dataverse.api.ApiBlockingFilter;
 
-import javax.json.Json;
-import javax.json.JsonValue;
-import javax.json.JsonObjectBuilder;
-import javax.servlet.ServletResponse;
-import javax.servlet.http.HttpServletRequest;
-import javax.servlet.http.HttpServletResponse;
-import javax.ws.rs.core.MediaType;
-import javax.ws.rs.core.Response;
+import jakarta.json.Json;
+import jakarta.json.JsonValue;
+import jakarta.json.JsonObjectBuilder;
+import jakarta.servlet.ServletResponse;
+import jakarta.servlet.http.HttpServletRequest;
+import jakarta.servlet.http.HttpServletResponse;
+import jakarta.ws.rs.core.MediaType;
+import jakarta.ws.rs.core.Response;
 
 import org.apache.commons.lang3.exception.ExceptionUtils;
 
diff --git a/src/main/java/edu/harvard/iq/dataverse/authorization/AuthFilter.java b/src/main/java/edu/harvard/iq/dataverse/authorization/AuthFilter.java
index 15d1cb07a11..a2cf3082ae7 100644
--- a/src/main/java/edu/harvard/iq/dataverse/authorization/AuthFilter.java
+++ b/src/main/java/edu/harvard/iq/dataverse/authorization/AuthFilter.java
@@ -8,14 +8,14 @@
 import java.util.logging.Level;
 import java.util.logging.Logger;
 import java.util.logging.SimpleFormatter;
-import javax.inject.Inject;
-import javax.servlet.Filter;
-import javax.servlet.FilterChain;
-import javax.servlet.FilterConfig;
-import javax.servlet.ServletException;
-import javax.servlet.ServletRequest;
-import javax.servlet.ServletResponse;
-import javax.servlet.http.HttpServletRequest;
+import jakarta.inject.Inject;
+import jakarta.servlet.Filter;
+import jakarta.servlet.FilterChain;
+import jakarta.servlet.FilterConfig;
+import jakarta.servlet.ServletException;
+import jakarta.servlet.ServletRequest;
+import jakarta.servlet.ServletResponse;
+import jakarta.servlet.http.HttpServletRequest;
 
 public class AuthFilter implements Filter {
 
diff --git a/src/main/java/edu/harvard/iq/dataverse/authorization/AuthTestDataServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/authorization/AuthTestDataServiceBean.java
index 3715900733c..9cee3ec67c7 100644
--- a/src/main/java/edu/harvard/iq/dataverse/authorization/AuthTestDataServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/authorization/AuthTestDataServiceBean.java
@@ -15,7 +15,7 @@
 import java.util.HashMap;
 import java.util.Map;
 import java.util.logging.Logger;
-import javax.ejb.Stateless;
+import jakarta.ejb.Stateless;
 import org.apache.commons.lang3.StringUtils;
 
 @Stateless
diff --git a/src/main/java/edu/harvard/iq/dataverse/authorization/AuthenticatedUserLookup.java b/src/main/java/edu/harvard/iq/dataverse/authorization/AuthenticatedUserLookup.java
index 94a773bc977..3291dd2efbf 100644
--- a/src/main/java/edu/harvard/iq/dataverse/authorization/AuthenticatedUserLookup.java
+++ b/src/main/java/edu/harvard/iq/dataverse/authorization/AuthenticatedUserLookup.java
@@ -2,17 +2,17 @@
 
 import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser;
 import java.io.Serializable;
-import javax.persistence.CascadeType;
-import javax.persistence.Entity;
-import javax.persistence.GeneratedValue;
-import javax.persistence.GenerationType;
-import javax.persistence.Id;
-import javax.persistence.JoinColumn;
-import javax.persistence.NamedQueries;
-import javax.persistence.NamedQuery;
-import javax.persistence.OneToOne;
-import javax.persistence.Table;
-import javax.persistence.UniqueConstraint;
+import jakarta.persistence.CascadeType;
+import jakarta.persistence.Entity;
+import jakarta.persistence.GeneratedValue;
+import jakarta.persistence.GenerationType;
+import jakarta.persistence.Id;
+import jakarta.persistence.JoinColumn;
+import jakarta.persistence.NamedQueries;
+import jakarta.persistence.NamedQuery;
+import jakarta.persistence.OneToOne;
+import jakarta.persistence.Table;
+import jakarta.persistence.UniqueConstraint;
 
 /**
  * A somewhat glorified key-value pair, persisted in the database.
diff --git a/src/main/java/edu/harvard/iq/dataverse/authorization/AuthenticationProvidersRegistrationServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/authorization/AuthenticationProvidersRegistrationServiceBean.java
index 6289865baf0..a93d01527a0 100644
--- a/src/main/java/edu/harvard/iq/dataverse/authorization/AuthenticationProvidersRegistrationServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/authorization/AuthenticationProvidersRegistrationServiceBean.java
@@ -22,15 +22,15 @@
 import java.util.Map;
 import java.util.logging.Level;
 import java.util.logging.Logger;
-import javax.annotation.PostConstruct;
-import javax.ejb.EJB;
-import javax.ejb.Lock;
-import static javax.ejb.LockType.READ;
-import static javax.ejb.LockType.WRITE;
-import javax.ejb.Singleton;
-import javax.inject.Named;
-import javax.persistence.EntityManager;
-import javax.persistence.PersistenceContext;
+import jakarta.annotation.PostConstruct;
+import jakarta.ejb.EJB;
+import jakarta.ejb.Lock;
+import static jakarta.ejb.LockType.READ;
+import static jakarta.ejb.LockType.WRITE;
+import jakarta.ejb.Singleton;
+import jakarta.inject.Named;
+import jakarta.persistence.EntityManager;
+import jakarta.persistence.PersistenceContext;
 
 /**
  *
diff --git a/src/main/java/edu/harvard/iq/dataverse/authorization/AuthenticationServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/authorization/AuthenticationServiceBean.java
index 9bf53116efa..106a83a4ad1 100644
--- a/src/main/java/edu/harvard/iq/dataverse/authorization/AuthenticationServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/authorization/AuthenticationServiceBean.java
@@ -6,31 +6,23 @@
 import edu.harvard.iq.dataverse.RoleAssigneeServiceBean;
 import edu.harvard.iq.dataverse.UserNotificationServiceBean;
 import edu.harvard.iq.dataverse.UserServiceBean;
-import edu.harvard.iq.dataverse.authorization.providers.oauth2.oidc.OIDCAuthenticationProviderFactory;
 import edu.harvard.iq.dataverse.search.IndexServiceBean;
 import edu.harvard.iq.dataverse.actionlogging.ActionLogRecord;
 import edu.harvard.iq.dataverse.actionlogging.ActionLogServiceBean;
 import edu.harvard.iq.dataverse.authorization.exceptions.AuthenticationFailedException;
-import edu.harvard.iq.dataverse.authorization.exceptions.AuthenticationProviderFactoryNotFoundException;
-import edu.harvard.iq.dataverse.authorization.exceptions.AuthorizationSetupException;
 import edu.harvard.iq.dataverse.authorization.groups.impl.explicit.ExplicitGroup;
 import edu.harvard.iq.dataverse.authorization.groups.impl.explicit.ExplicitGroupServiceBean;
 import edu.harvard.iq.dataverse.authorization.providers.AuthenticationProviderFactory;
-import edu.harvard.iq.dataverse.authorization.providers.AuthenticationProviderRow;
 import edu.harvard.iq.dataverse.authorization.providers.builtin.BuiltinAuthenticationProvider;
-import edu.harvard.iq.dataverse.authorization.providers.builtin.BuiltinAuthenticationProviderFactory;
 import edu.harvard.iq.dataverse.authorization.providers.builtin.BuiltinUser;
 import edu.harvard.iq.dataverse.authorization.providers.builtin.BuiltinUserServiceBean;
 import edu.harvard.iq.dataverse.authorization.providers.builtin.PasswordEncryption;
 import edu.harvard.iq.dataverse.authorization.providers.oauth2.AbstractOAuth2AuthenticationProvider;
-import edu.harvard.iq.dataverse.authorization.providers.oauth2.OAuth2AuthenticationProviderFactory;
 import edu.harvard.iq.dataverse.authorization.providers.shib.ShibAuthenticationProvider;
-import edu.harvard.iq.dataverse.authorization.providers.shib.ShibAuthenticationProviderFactory;
 import edu.harvard.iq.dataverse.authorization.users.ApiToken;
 import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser;
 import edu.harvard.iq.dataverse.confirmemail.ConfirmEmailData;
 import edu.harvard.iq.dataverse.confirmemail.ConfirmEmailServiceBean;
-import edu.harvard.iq.dataverse.engine.command.impl.RevokeAllRolesCommand;
 import edu.harvard.iq.dataverse.passwordreset.PasswordResetData;
 import edu.harvard.iq.dataverse.passwordreset.PasswordResetServiceBean;
 import edu.harvard.iq.dataverse.search.savedsearch.SavedSearchServiceBean;
@@ -44,7 +36,6 @@
 import java.util.Calendar;
 import java.util.Collection;
 import java.util.Date;
-import java.util.HashMap;
 import java.util.HashSet;
 import java.util.List;
 import java.util.Map;
@@ -53,21 +44,21 @@
 import java.util.logging.Level;
 import java.util.logging.Logger;
 import java.util.stream.Collectors;
-import javax.annotation.PostConstruct;
-import javax.ejb.EJB;
-import javax.ejb.EJBException;
-import javax.ejb.Stateless;
-import javax.inject.Named;
-import javax.persistence.EntityManager;
-import javax.persistence.NoResultException;
-import javax.persistence.NonUniqueResultException;
-import javax.persistence.PersistenceContext;
-import javax.persistence.Query;
-import javax.persistence.TypedQuery;
-import javax.validation.ConstraintViolation;
-import javax.validation.Validation;
-import javax.validation.Validator;
-import javax.validation.ValidatorFactory;
+import jakarta.annotation.PostConstruct;
+import jakarta.ejb.EJB;
+import jakarta.ejb.EJBException;
+import jakarta.ejb.Stateless;
+import jakarta.inject.Named;
+import jakarta.persistence.EntityManager;
+import jakarta.persistence.NoResultException;
+import jakarta.persistence.NonUniqueResultException;
+import jakarta.persistence.PersistenceContext;
+import jakarta.persistence.Query;
+import jakarta.persistence.TypedQuery;
+import jakarta.validation.ConstraintViolation;
+import jakarta.validation.Validation;
+import jakarta.validation.Validator;
+import jakarta.validation.ValidatorFactory;
 
 /**
  * AuthenticationService is for general authentication-related operations.
diff --git a/src/main/java/edu/harvard/iq/dataverse/authorization/DataverseRole.java b/src/main/java/edu/harvard/iq/dataverse/authorization/DataverseRole.java
index 12ddf817221..ff1a5546f38 100644
--- a/src/main/java/edu/harvard/iq/dataverse/authorization/DataverseRole.java
+++ b/src/main/java/edu/harvard/iq/dataverse/authorization/DataverseRole.java
@@ -11,19 +11,19 @@
 import java.util.MissingResourceException;
 import java.util.Objects;
 import java.util.Set;
-import javax.persistence.Column;
-import javax.persistence.Entity;
-import javax.persistence.GeneratedValue;
-import javax.persistence.GenerationType;
-import javax.persistence.Id;
-import javax.persistence.Index;
-import javax.persistence.JoinColumn;
-import javax.persistence.ManyToOne;
-import javax.persistence.NamedQueries;
-import javax.persistence.NamedQuery;
-import javax.persistence.Table;
-import javax.validation.constraints.Pattern;
-import javax.validation.constraints.Size;
+import jakarta.persistence.Column;
+import jakarta.persistence.Entity;
+import jakarta.persistence.GeneratedValue;
+import jakarta.persistence.GenerationType;
+import jakarta.persistence.Id;
+import jakarta.persistence.Index;
+import jakarta.persistence.JoinColumn;
+import jakarta.persistence.ManyToOne;
+import jakarta.persistence.NamedQueries;
+import jakarta.persistence.NamedQuery;
+import jakarta.persistence.Table;
+import jakarta.validation.constraints.Pattern;
+import jakarta.validation.constraints.Size;
 
 /**
  * A role is an annotated set of permissions. A role belongs
diff --git a/src/main/java/edu/harvard/iq/dataverse/authorization/DataverseRolePermissionHelper.java b/src/main/java/edu/harvard/iq/dataverse/authorization/DataverseRolePermissionHelper.java
index 4e6b54a8d49..966247bce2e 100644
--- a/src/main/java/edu/harvard/iq/dataverse/authorization/DataverseRolePermissionHelper.java
+++ b/src/main/java/edu/harvard/iq/dataverse/authorization/DataverseRolePermissionHelper.java
@@ -4,21 +4,17 @@
 import edu.harvard.iq.dataverse.DataFile;
 import edu.harvard.iq.dataverse.Dataset;
 import edu.harvard.iq.dataverse.Dataverse;
-import edu.harvard.iq.dataverse.DataverseRoleServiceBean;
-import edu.harvard.iq.dataverse.authorization.DataverseRole;
-import java.sql.Array;
-import java.util.AbstractMap;
+
 import java.util.ArrayList;
 import java.util.HashMap;
-import java.util.Iterator;
 import java.util.List;
 import java.util.Map;
 import java.util.Map.Entry;
 import java.util.Set;
 import java.util.logging.Logger;
-import javax.ejb.EJB;
-import javax.ejb.Stateless;
-import javax.inject.Named;
+import jakarta.ejb.EJB;
+import jakarta.ejb.Stateless;
+import jakarta.inject.Named;
 import org.apache.commons.lang3.StringUtils;
 
 /*
diff --git a/src/main/java/edu/harvard/iq/dataverse/authorization/groups/GroupServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/authorization/groups/GroupServiceBean.java
index 66293a4f781..a746eee0a60 100644
--- a/src/main/java/edu/harvard/iq/dataverse/authorization/groups/GroupServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/authorization/groups/GroupServiceBean.java
@@ -26,11 +26,11 @@
 import java.util.logging.Logger;
 import static java.util.stream.Collectors.toSet;
 import java.util.stream.Stream;
-import javax.annotation.PostConstruct;
-import javax.ejb.EJB;
-import javax.ejb.Stateless;
-import javax.inject.Inject;
-import javax.inject.Named;
+import jakarta.annotation.PostConstruct;
+import jakarta.ejb.EJB;
+import jakarta.ejb.Stateless;
+import jakarta.inject.Inject;
+import jakarta.inject.Named;
 
 /**
  *
diff --git a/src/main/java/edu/harvard/iq/dataverse/authorization/groups/impl/PersistedGlobalGroup.java b/src/main/java/edu/harvard/iq/dataverse/authorization/groups/impl/PersistedGlobalGroup.java
index 52785d5c7e2..1ef3b01d752 100644
--- a/src/main/java/edu/harvard/iq/dataverse/authorization/groups/impl/PersistedGlobalGroup.java
+++ b/src/main/java/edu/harvard/iq/dataverse/authorization/groups/impl/PersistedGlobalGroup.java
@@ -3,14 +3,14 @@
 import edu.harvard.iq.dataverse.authorization.groups.Group;
 import edu.harvard.iq.dataverse.authorization.RoleAssigneeDisplayInfo;
 import java.io.Serializable;
-import javax.persistence.Column;
-import javax.persistence.Entity;
-import javax.persistence.GeneratedValue;
-import javax.persistence.Id;
-import javax.persistence.Index;
-import javax.persistence.NamedQueries;
-import javax.persistence.NamedQuery;
-import javax.persistence.Table;
+import jakarta.persistence.Column;
+import jakarta.persistence.Entity;
+import jakarta.persistence.GeneratedValue;
+import jakarta.persistence.Id;
+import jakarta.persistence.Index;
+import jakarta.persistence.NamedQueries;
+import jakarta.persistence.NamedQuery;
+import jakarta.persistence.Table;
 
 /**
  * Convenience base class for implementing groups that apply to the entire Dataverse
diff --git a/src/main/java/edu/harvard/iq/dataverse/authorization/groups/impl/explicit/ExplicitGroup.java b/src/main/java/edu/harvard/iq/dataverse/authorization/groups/impl/explicit/ExplicitGroup.java
index 43705a2240e..2723561d8b4 100644
--- a/src/main/java/edu/harvard/iq/dataverse/authorization/groups/impl/explicit/ExplicitGroup.java
+++ b/src/main/java/edu/harvard/iq/dataverse/authorization/groups/impl/explicit/ExplicitGroup.java
@@ -13,24 +13,24 @@
 import java.util.Objects;
 import java.util.Set;
 import java.util.TreeSet;
-import javax.persistence.Column;
-import javax.persistence.ElementCollection;
-import javax.persistence.Entity;
-import javax.persistence.GeneratedValue;
-import javax.persistence.GenerationType;
-import javax.persistence.Id;
-import javax.persistence.Index;
-import javax.persistence.JoinColumn;
-import javax.persistence.JoinTable;
-import javax.persistence.ManyToMany;
-import javax.persistence.ManyToOne;
-import javax.persistence.NamedQueries;
-import javax.persistence.NamedQuery;
-import javax.persistence.PostLoad;
-import javax.persistence.PrePersist;
-import javax.persistence.Table;
-import javax.persistence.Transient;
-import javax.validation.constraints.Pattern;
+import jakarta.persistence.Column;
+import jakarta.persistence.ElementCollection;
+import jakarta.persistence.Entity;
+import jakarta.persistence.GeneratedValue;
+import jakarta.persistence.GenerationType;
+import jakarta.persistence.Id;
+import jakarta.persistence.Index;
+import jakarta.persistence.JoinColumn;
+import jakarta.persistence.JoinTable;
+import jakarta.persistence.ManyToMany;
+import jakarta.persistence.ManyToOne;
+import jakarta.persistence.NamedQueries;
+import jakarta.persistence.NamedQuery;
+import jakarta.persistence.PostLoad;
+import jakarta.persistence.PrePersist;
+import jakarta.persistence.Table;
+import jakarta.persistence.Transient;
+import jakarta.validation.constraints.Pattern;
 import org.hibernate.validator.constraints.NotBlank;
 
 /**
diff --git a/src/main/java/edu/harvard/iq/dataverse/authorization/groups/impl/explicit/ExplicitGroupServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/authorization/groups/impl/explicit/ExplicitGroupServiceBean.java
index b7c1b46b3a7..a688fac0e34 100644
--- a/src/main/java/edu/harvard/iq/dataverse/authorization/groups/impl/explicit/ExplicitGroupServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/authorization/groups/impl/explicit/ExplicitGroupServiceBean.java
@@ -10,17 +10,16 @@
 import java.util.List;
 import java.util.Set;
 import java.util.TreeSet;
-import java.util.logging.Level;
 import java.util.logging.Logger;
 import java.util.stream.Collectors;
 import static java.util.stream.Collectors.joining;
-import javax.annotation.PostConstruct;
-import javax.ejb.EJB;
-import javax.ejb.Stateless;
-import javax.inject.Named;
-import javax.persistence.EntityManager;
-import javax.persistence.NoResultException;
-import javax.persistence.PersistenceContext;
+import jakarta.annotation.PostConstruct;
+import jakarta.ejb.EJB;
+import jakarta.ejb.Stateless;
+import jakarta.inject.Named;
+import jakarta.persistence.EntityManager;
+import jakarta.persistence.NoResultException;
+import jakarta.persistence.PersistenceContext;
 
 /**
  * A bean providing the {@link ExplicitGroupProvider}s with container services,
diff --git a/src/main/java/edu/harvard/iq/dataverse/authorization/groups/impl/ipaddress/IpGroup.java b/src/main/java/edu/harvard/iq/dataverse/authorization/groups/impl/ipaddress/IpGroup.java
index a3231557898..038fbbfc6e0 100644
--- a/src/main/java/edu/harvard/iq/dataverse/authorization/groups/impl/ipaddress/IpGroup.java
+++ b/src/main/java/edu/harvard/iq/dataverse/authorization/groups/impl/ipaddress/IpGroup.java
@@ -12,12 +12,12 @@
 import java.util.HashSet;
 import java.util.Objects;
 import java.util.Set;
-import javax.persistence.CascadeType;
-import javax.persistence.Entity;
-import javax.persistence.NamedQueries;
-import javax.persistence.NamedQuery;
-import javax.persistence.OneToMany;
-import javax.persistence.Transient;
+import jakarta.persistence.CascadeType;
+import jakarta.persistence.Entity;
+import jakarta.persistence.NamedQueries;
+import jakarta.persistence.NamedQuery;
+import jakarta.persistence.OneToMany;
+import jakarta.persistence.Transient;
 
 @NamedQueries({
     @NamedQuery(name="IpGroup.findAll",
diff --git a/src/main/java/edu/harvard/iq/dataverse/authorization/groups/impl/ipaddress/IpGroupsServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/authorization/groups/impl/ipaddress/IpGroupsServiceBean.java
index c03cf26e11e..15282045b3a 100644
--- a/src/main/java/edu/harvard/iq/dataverse/authorization/groups/impl/ipaddress/IpGroupsServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/authorization/groups/impl/ipaddress/IpGroupsServiceBean.java
@@ -10,12 +10,12 @@
 import java.util.List;
 import java.util.Set;
 import java.util.logging.Logger;
-import javax.ejb.EJB;
-import javax.ejb.Stateless;
-import javax.inject.Named;
-import javax.persistence.EntityManager;
-import javax.persistence.NoResultException;
-import javax.persistence.PersistenceContext;
+import jakarta.ejb.EJB;
+import jakarta.ejb.Stateless;
+import jakarta.inject.Named;
+import jakarta.persistence.EntityManager;
+import jakarta.persistence.NoResultException;
+import jakarta.persistence.PersistenceContext;
 
 /**
  * Provides CRUD tools to efficiently manage IP groups in a Java EE container.
diff --git a/src/main/java/edu/harvard/iq/dataverse/authorization/groups/impl/ipaddress/ip/IPv4Range.java b/src/main/java/edu/harvard/iq/dataverse/authorization/groups/impl/ipaddress/ip/IPv4Range.java
index 3ecd7689e1c..8694b7d455b 100644
--- a/src/main/java/edu/harvard/iq/dataverse/authorization/groups/impl/ipaddress/ip/IPv4Range.java
+++ b/src/main/java/edu/harvard/iq/dataverse/authorization/groups/impl/ipaddress/ip/IPv4Range.java
@@ -1,13 +1,13 @@
 package edu.harvard.iq.dataverse.authorization.groups.impl.ipaddress.ip;
 
 import java.math.BigInteger;
-import javax.persistence.Entity;
-import javax.persistence.GeneratedValue;
-import javax.persistence.Id;
-import javax.persistence.Index;
-import javax.persistence.NamedQueries;
-import javax.persistence.NamedQuery;
-import javax.persistence.Table;
+import jakarta.persistence.Entity;
+import jakarta.persistence.GeneratedValue;
+import jakarta.persistence.Id;
+import jakarta.persistence.Index;
+import jakarta.persistence.NamedQueries;
+import jakarta.persistence.NamedQuery;
+import jakarta.persistence.Table;
 
 /**
  * A range of IPv4 addresses. In order to make SQL querying efficient, the actual fields
diff --git a/src/main/java/edu/harvard/iq/dataverse/authorization/groups/impl/ipaddress/ip/IPv6Range.java b/src/main/java/edu/harvard/iq/dataverse/authorization/groups/impl/ipaddress/ip/IPv6Range.java
index d1301d550c7..379c64a88cf 100644
--- a/src/main/java/edu/harvard/iq/dataverse/authorization/groups/impl/ipaddress/ip/IPv6Range.java
+++ b/src/main/java/edu/harvard/iq/dataverse/authorization/groups/impl/ipaddress/ip/IPv6Range.java
@@ -1,13 +1,13 @@
 package edu.harvard.iq.dataverse.authorization.groups.impl.ipaddress.ip;
 
 import java.io.Serializable;
-import javax.persistence.Entity;
-import javax.persistence.GeneratedValue;
-import javax.persistence.Id;
-import javax.persistence.Index;
-import javax.persistence.NamedQueries;
-import javax.persistence.NamedQuery;
-import javax.persistence.Table;
+import jakarta.persistence.Entity;
+import jakarta.persistence.GeneratedValue;
+import jakarta.persistence.Id;
+import jakarta.persistence.Index;
+import jakarta.persistence.NamedQueries;
+import jakarta.persistence.NamedQuery;
+import jakarta.persistence.Table;
 
 /**
  * 
diff --git a/src/main/java/edu/harvard/iq/dataverse/authorization/groups/impl/ipaddress/ip/IpAddressRange.java b/src/main/java/edu/harvard/iq/dataverse/authorization/groups/impl/ipaddress/ip/IpAddressRange.java
index b71dbcd0eba..fc21397898f 100644
--- a/src/main/java/edu/harvard/iq/dataverse/authorization/groups/impl/ipaddress/ip/IpAddressRange.java
+++ b/src/main/java/edu/harvard/iq/dataverse/authorization/groups/impl/ipaddress/ip/IpAddressRange.java
@@ -2,8 +2,8 @@
 
 import edu.harvard.iq.dataverse.authorization.groups.impl.ipaddress.IpGroup;
 import java.util.Objects;
-import javax.persistence.ManyToOne;
-import javax.persistence.MappedSuperclass;
+import jakarta.persistence.ManyToOne;
+import jakarta.persistence.MappedSuperclass;
 
 /**
  * A range of {@link IpAddress}es. Abstract class - to instantiate, you need to
diff --git a/src/main/java/edu/harvard/iq/dataverse/authorization/groups/impl/maildomain/MailDomainGroup.java b/src/main/java/edu/harvard/iq/dataverse/authorization/groups/impl/maildomain/MailDomainGroup.java
index def11c57076..15b2fd1810c 100644
--- a/src/main/java/edu/harvard/iq/dataverse/authorization/groups/impl/maildomain/MailDomainGroup.java
+++ b/src/main/java/edu/harvard/iq/dataverse/authorization/groups/impl/maildomain/MailDomainGroup.java
@@ -6,11 +6,11 @@
 import java.util.Arrays;
 import java.util.List;
 import java.util.Objects;
-import javax.persistence.Entity;
-import javax.persistence.NamedQueries;
-import javax.persistence.NamedQuery;
-import javax.persistence.Transient;
-import javax.validation.constraints.NotEmpty;
+import jakarta.persistence.Entity;
+import jakarta.persistence.NamedQueries;
+import jakarta.persistence.NamedQuery;
+import jakarta.persistence.Transient;
+import jakarta.validation.constraints.NotEmpty;
 ;
 
 /**
diff --git a/src/main/java/edu/harvard/iq/dataverse/authorization/groups/impl/maildomain/MailDomainGroupServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/authorization/groups/impl/maildomain/MailDomainGroupServiceBean.java
index 58e72b7b575..b1b1e883705 100644
--- a/src/main/java/edu/harvard/iq/dataverse/authorization/groups/impl/maildomain/MailDomainGroupServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/authorization/groups/impl/maildomain/MailDomainGroupServiceBean.java
@@ -9,14 +9,14 @@
 import java.util.logging.Logger;
 import java.util.regex.Pattern;
 import java.util.stream.Collectors;
-import javax.annotation.PostConstruct;
-import javax.ejb.*;
-import javax.inject.Inject;
-import javax.inject.Named;
-import javax.persistence.EntityManager;
-import javax.persistence.NoResultException;
-import javax.persistence.PersistenceContext;
-import javax.ws.rs.NotFoundException;
+import jakarta.annotation.PostConstruct;
+import jakarta.ejb.*;
+import jakarta.inject.Inject;
+import jakarta.inject.Named;
+import jakarta.persistence.EntityManager;
+import jakarta.persistence.NoResultException;
+import jakarta.persistence.PersistenceContext;
+import jakarta.ws.rs.NotFoundException;
 
 /**
  * A bean providing the {@link MailDomainGroupProvider}s with container services, such as database connectivity.
diff --git a/src/main/java/edu/harvard/iq/dataverse/authorization/groups/impl/shib/ShibGroup.java b/src/main/java/edu/harvard/iq/dataverse/authorization/groups/impl/shib/ShibGroup.java
index 79fda0ca7d7..30850f0fb20 100644
--- a/src/main/java/edu/harvard/iq/dataverse/authorization/groups/impl/shib/ShibGroup.java
+++ b/src/main/java/edu/harvard/iq/dataverse/authorization/groups/impl/shib/ShibGroup.java
@@ -1,17 +1,16 @@
 package edu.harvard.iq.dataverse.authorization.groups.impl.shib;
 
-import edu.harvard.iq.dataverse.authorization.RoleAssignee;
 import edu.harvard.iq.dataverse.authorization.RoleAssigneeDisplayInfo;
 import edu.harvard.iq.dataverse.authorization.groups.Group;
 import edu.harvard.iq.dataverse.authorization.groups.GroupProvider;
 import edu.harvard.iq.dataverse.engine.command.DataverseRequest;
 import java.io.Serializable;
-import javax.persistence.Column;
-import javax.persistence.Entity;
-import javax.persistence.GeneratedValue;
-import javax.persistence.GenerationType;
-import javax.persistence.Id;
-import javax.persistence.Transient;
+import jakarta.persistence.Column;
+import jakarta.persistence.Entity;
+import jakarta.persistence.GeneratedValue;
+import jakarta.persistence.GenerationType;
+import jakarta.persistence.Id;
+import jakarta.persistence.Transient;
 
 /**
  * Persistence for Shibboleth groups.
diff --git a/src/main/java/edu/harvard/iq/dataverse/authorization/groups/impl/shib/ShibGroupServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/authorization/groups/impl/shib/ShibGroupServiceBean.java
index c15e56ee7e0..7a7844b7c1e 100644
--- a/src/main/java/edu/harvard/iq/dataverse/authorization/groups/impl/shib/ShibGroupServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/authorization/groups/impl/shib/ShibGroupServiceBean.java
@@ -11,14 +11,14 @@
 import java.util.List;
 import java.util.Set;
 import java.util.logging.Logger;
-import javax.ejb.EJB;
-import javax.ejb.Stateless;
-import javax.inject.Named;
-import javax.persistence.EntityManager;
-import javax.persistence.NoResultException;
-import javax.persistence.NonUniqueResultException;
-import javax.persistence.PersistenceContext;
-import javax.persistence.TypedQuery;
+import jakarta.ejb.EJB;
+import jakarta.ejb.Stateless;
+import jakarta.inject.Named;
+import jakarta.persistence.EntityManager;
+import jakarta.persistence.NoResultException;
+import jakarta.persistence.NonUniqueResultException;
+import jakarta.persistence.PersistenceContext;
+import jakarta.persistence.TypedQuery;
 
 /**
  * @todo Consider merging this bean into the newer and more generic
diff --git a/src/main/java/edu/harvard/iq/dataverse/authorization/providers/AuthenticationProviderRow.java b/src/main/java/edu/harvard/iq/dataverse/authorization/providers/AuthenticationProviderRow.java
index 6b9c545b7f9..2f37c777877 100644
--- a/src/main/java/edu/harvard/iq/dataverse/authorization/providers/AuthenticationProviderRow.java
+++ b/src/main/java/edu/harvard/iq/dataverse/authorization/providers/AuthenticationProviderRow.java
@@ -2,13 +2,13 @@
 
 import edu.harvard.iq.dataverse.authorization.AuthenticationProvider;
 import java.util.Objects;
-import javax.persistence.Entity;
-import javax.persistence.Id;
-import javax.persistence.Index;
-import javax.persistence.Lob;
-import javax.persistence.NamedQueries;
-import javax.persistence.NamedQuery;
-import javax.persistence.Table;
+import jakarta.persistence.Entity;
+import jakarta.persistence.Id;
+import jakarta.persistence.Index;
+import jakarta.persistence.Lob;
+import jakarta.persistence.NamedQueries;
+import jakarta.persistence.NamedQuery;
+import jakarta.persistence.Table;
 
 /**
  * Database-storable form of an {@code AuthenticationProvider}.
diff --git a/src/main/java/edu/harvard/iq/dataverse/authorization/providers/builtin/BuiltinUser.java b/src/main/java/edu/harvard/iq/dataverse/authorization/providers/builtin/BuiltinUser.java
index c2510b8b043..2ce36997ea9 100644
--- a/src/main/java/edu/harvard/iq/dataverse/authorization/providers/builtin/BuiltinUser.java
+++ b/src/main/java/edu/harvard/iq/dataverse/authorization/providers/builtin/BuiltinUser.java
@@ -5,20 +5,20 @@
 import edu.harvard.iq.dataverse.passwordreset.PasswordResetData;
 
 import java.io.Serializable;
-import javax.persistence.CascadeType;
-import javax.persistence.Column;
-import javax.persistence.Entity;
-import javax.persistence.GeneratedValue;
-import javax.persistence.GenerationType;
-import javax.persistence.Id;
-import javax.persistence.Index;
-import javax.persistence.NamedQueries;
-import javax.persistence.NamedQuery;
-import javax.persistence.OneToOne;
-import javax.persistence.Table;
-import javax.persistence.Transient;
-import javax.validation.constraints.NotBlank;
-import javax.validation.constraints.Size;
+import jakarta.persistence.CascadeType;
+import jakarta.persistence.Column;
+import jakarta.persistence.Entity;
+import jakarta.persistence.GeneratedValue;
+import jakarta.persistence.GenerationType;
+import jakarta.persistence.Id;
+import jakarta.persistence.Index;
+import jakarta.persistence.NamedQueries;
+import jakarta.persistence.NamedQuery;
+import jakarta.persistence.OneToOne;
+import jakarta.persistence.Table;
+import jakarta.persistence.Transient;
+import jakarta.validation.constraints.NotBlank;
+import jakarta.validation.constraints.Size;
 
 /**
  *
diff --git a/src/main/java/edu/harvard/iq/dataverse/authorization/providers/builtin/BuiltinUserServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/authorization/providers/builtin/BuiltinUserServiceBean.java
index c39c7cb2985..ffbc5d7a027 100644
--- a/src/main/java/edu/harvard/iq/dataverse/authorization/providers/builtin/BuiltinUserServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/authorization/providers/builtin/BuiltinUserServiceBean.java
@@ -9,17 +9,17 @@
 import java.util.Set;
 import java.util.logging.Level;
 import java.util.logging.Logger;
-import javax.ejb.EJB;
-import javax.ejb.Stateless;
-import javax.inject.Named;
-import javax.persistence.EntityManager;
-import javax.persistence.NoResultException;
-import javax.persistence.NonUniqueResultException;
-import javax.persistence.PersistenceContext;
-import javax.validation.ConstraintViolation;
-import javax.validation.Validation;
-import javax.validation.Validator;
-import javax.validation.ValidatorFactory;
+import jakarta.ejb.EJB;
+import jakarta.ejb.Stateless;
+import jakarta.inject.Named;
+import jakarta.persistence.EntityManager;
+import jakarta.persistence.NoResultException;
+import jakarta.persistence.NonUniqueResultException;
+import jakarta.persistence.PersistenceContext;
+import jakarta.validation.ConstraintViolation;
+import jakarta.validation.Validation;
+import jakarta.validation.Validator;
+import jakarta.validation.ValidatorFactory;
 
 /**
  *
@@ -88,7 +88,7 @@ public BuiltinUser findByUserName(String userName) {
             return em.createNamedQuery("BuiltinUser.findByUserName", BuiltinUser.class)
                     .setParameter("userName", userName)
                     .getSingleResult();
-        } catch (javax.persistence.NoResultException e) {
+        } catch (NoResultException e) {
             return null;
         } catch (NonUniqueResultException ex) {
             logger.log(Level.WARNING, "multiple accounts found for username {0}", userName);
diff --git a/src/main/java/edu/harvard/iq/dataverse/authorization/providers/builtin/DataverseUserPage.java b/src/main/java/edu/harvard/iq/dataverse/authorization/providers/builtin/DataverseUserPage.java
index 5c0f3a49f76..dc4644dfccd 100644
--- a/src/main/java/edu/harvard/iq/dataverse/authorization/providers/builtin/DataverseUserPage.java
+++ b/src/main/java/edu/harvard/iq/dataverse/authorization/providers/builtin/DataverseUserPage.java
@@ -51,15 +51,15 @@
 import java.util.logging.Level;
 import java.util.logging.Logger;
 import java.util.stream.Collectors;
-import javax.ejb.EJB;
-import javax.faces.application.FacesMessage;
-import javax.faces.component.UIComponent;
-import javax.faces.component.UIInput;
-import javax.faces.context.FacesContext;
-import javax.faces.event.ActionEvent;
-import javax.faces.view.ViewScoped;
-import javax.inject.Inject;
-import javax.inject.Named;
+import jakarta.ejb.EJB;
+import jakarta.faces.application.FacesMessage;
+import jakarta.faces.component.UIComponent;
+import jakarta.faces.component.UIInput;
+import jakarta.faces.context.FacesContext;
+import jakarta.faces.event.ActionEvent;
+import jakarta.faces.view.ViewScoped;
+import jakarta.inject.Inject;
+import jakarta.inject.Named;
 
 import org.apache.commons.lang3.StringUtils;
 import org.hibernate.validator.constraints.NotBlank;
diff --git a/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/AbstractOAuth2AuthenticationProvider.java b/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/AbstractOAuth2AuthenticationProvider.java
index 01139cd2e27..48efe1e2592 100644
--- a/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/AbstractOAuth2AuthenticationProvider.java
+++ b/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/AbstractOAuth2AuthenticationProvider.java
@@ -14,7 +14,7 @@
 import edu.harvard.iq.dataverse.authorization.AuthenticationProviderDisplayInfo;
 import edu.harvard.iq.dataverse.util.BundleUtil;
 
-import javax.validation.constraints.NotNull;
+import jakarta.validation.constraints.NotNull;
 import java.io.IOException;
 import java.util.*;
 import java.util.concurrent.ExecutionException;
diff --git a/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/OAuth2FirstLoginPage.java b/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/OAuth2FirstLoginPage.java
index 54ba3ec6a05..821e8a5ea6c 100644
--- a/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/OAuth2FirstLoginPage.java
+++ b/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/OAuth2FirstLoginPage.java
@@ -30,14 +30,14 @@
 import java.util.Map;
 import java.util.logging.Level;
 import java.util.logging.Logger;
-import javax.ejb.EJB;
-import javax.enterprise.context.SessionScoped;
-import javax.faces.application.FacesMessage;
-import javax.faces.component.UIComponent;
-import javax.faces.component.UIInput;
-import javax.faces.context.FacesContext;
-import javax.inject.Named;
-import javax.inject.Inject;
+import jakarta.ejb.EJB;
+import jakarta.enterprise.context.SessionScoped;
+import jakarta.faces.application.FacesMessage;
+import jakarta.faces.component.UIComponent;
+import jakarta.faces.component.UIInput;
+import jakarta.faces.context.FacesContext;
+import jakarta.inject.Named;
+import jakarta.inject.Inject;
 import org.hibernate.validator.constraints.NotBlank;
 
 /**
diff --git a/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/OAuth2LoginBackingBean.java b/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/OAuth2LoginBackingBean.java
index c5be41a014a..99df2375a79 100644
--- a/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/OAuth2LoginBackingBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/OAuth2LoginBackingBean.java
@@ -20,12 +20,12 @@
 import java.util.logging.Level;
 import java.util.logging.Logger;
 import static java.util.stream.Collectors.toList;
-import javax.ejb.EJB;
-import javax.inject.Named;
-import javax.faces.view.ViewScoped;
-import javax.inject.Inject;
-import javax.servlet.http.HttpServletRequest;
-import javax.validation.constraints.NotNull;
+import jakarta.ejb.EJB;
+import jakarta.inject.Named;
+import jakarta.faces.view.ViewScoped;
+import jakarta.inject.Inject;
+import jakarta.servlet.http.HttpServletRequest;
+import jakarta.validation.constraints.NotNull;
 
 import static edu.harvard.iq.dataverse.util.StringUtil.toOption;
 import edu.harvard.iq.dataverse.util.SystemConfig;
diff --git a/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/OAuth2TokenData.java b/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/OAuth2TokenData.java
index a5ee5ddf537..59f659ff297 100644
--- a/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/OAuth2TokenData.java
+++ b/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/OAuth2TokenData.java
@@ -4,14 +4,14 @@
 import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser;
 import java.io.Serializable;
 import java.sql.Timestamp;
-import javax.persistence.Column;
-import javax.persistence.Entity;
-import javax.persistence.GeneratedValue;
-import javax.persistence.GenerationType;
-import javax.persistence.Id;
-import javax.persistence.ManyToOne;
-import javax.persistence.NamedQueries;
-import javax.persistence.NamedQuery;
+import jakarta.persistence.Column;
+import jakarta.persistence.Entity;
+import jakarta.persistence.GeneratedValue;
+import jakarta.persistence.GenerationType;
+import jakarta.persistence.Id;
+import jakarta.persistence.ManyToOne;
+import jakarta.persistence.NamedQueries;
+import jakarta.persistence.NamedQuery;
 
 /**
  * Token data for a given user, received from an OAuth2 system. Contains the 
diff --git a/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/OAuth2TokenDataServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/OAuth2TokenDataServiceBean.java
index d8f1fa7600b..b1dcb6df8cc 100644
--- a/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/OAuth2TokenDataServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/OAuth2TokenDataServiceBean.java
@@ -2,10 +2,10 @@
 
 import java.util.List;
 import java.util.Optional;
-import javax.ejb.Stateless;
-import javax.inject.Named;
-import javax.persistence.EntityManager;
-import javax.persistence.PersistenceContext;
+import jakarta.ejb.Stateless;
+import jakarta.inject.Named;
+import jakarta.persistence.EntityManager;
+import jakarta.persistence.PersistenceContext;
 
 /**
  * CRUD for {@link OAuth2TokenData}.
diff --git a/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/impl/GitHubOAuth2AP.java b/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/impl/GitHubOAuth2AP.java
index 62f3cc382e2..8829a25336b 100644
--- a/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/impl/GitHubOAuth2AP.java
+++ b/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/impl/GitHubOAuth2AP.java
@@ -9,9 +9,9 @@
 import edu.harvard.iq.dataverse.util.BundleUtil;
 import java.io.StringReader;
 import java.util.Collections;
-import javax.json.Json;
-import javax.json.JsonObject;
-import javax.json.JsonReader;
+import jakarta.json.Json;
+import jakarta.json.JsonObject;
+import jakarta.json.JsonReader;
 
 /**
  * IDP adaptor for GitHub.com
diff --git a/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/impl/GoogleOAuth2AP.java b/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/impl/GoogleOAuth2AP.java
index 1fa5470d551..a864ecb810a 100644
--- a/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/impl/GoogleOAuth2AP.java
+++ b/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/impl/GoogleOAuth2AP.java
@@ -8,9 +8,9 @@
 import java.io.StringReader;
 import java.util.Arrays;
 import java.util.UUID;
-import javax.json.Json;
-import javax.json.JsonObject;
-import javax.json.JsonReader;
+import jakarta.json.Json;
+import jakarta.json.JsonObject;
+import jakarta.json.JsonReader;
 
 /**
  *
diff --git a/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/impl/MicrosoftOAuth2AP.java b/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/impl/MicrosoftOAuth2AP.java
index da260a9fb0e..bd3caccc220 100644
--- a/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/impl/MicrosoftOAuth2AP.java
+++ b/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/impl/MicrosoftOAuth2AP.java
@@ -8,9 +8,9 @@
 import java.util.Collections;
 import java.util.logging.Logger;
 import java.io.StringReader;
-import javax.json.Json;
-import javax.json.JsonObject;
-import javax.json.JsonReader;
+import jakarta.json.Json;
+import jakarta.json.JsonObject;
+import jakarta.json.JsonReader;
 import edu.harvard.iq.dataverse.authorization.AuthenticatedUserDisplayInfo;
 
 /**
diff --git a/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/impl/OrcidOAuth2AP.java b/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/impl/OrcidOAuth2AP.java
index 02177ee0032..089ca40e164 100644
--- a/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/impl/OrcidOAuth2AP.java
+++ b/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/impl/OrcidOAuth2AP.java
@@ -23,10 +23,10 @@
 import static java.util.stream.Collectors.joining;
 import java.util.stream.IntStream;
 import java.util.stream.Stream;
-import javax.json.Json;
-import javax.json.JsonObject;
-import javax.json.JsonReader;
-import javax.validation.constraints.NotNull;
+import jakarta.json.Json;
+import jakarta.json.JsonObject;
+import jakarta.json.JsonReader;
+import jakarta.validation.constraints.NotNull;
 import javax.xml.parsers.DocumentBuilder;
 import javax.xml.parsers.DocumentBuilderFactory;
 import javax.xml.parsers.ParserConfigurationException;
diff --git a/src/main/java/edu/harvard/iq/dataverse/authorization/providers/shib/ShibServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/authorization/providers/shib/ShibServiceBean.java
index 3e986a15689..0921b2c6683 100644
--- a/src/main/java/edu/harvard/iq/dataverse/authorization/providers/shib/ShibServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/authorization/providers/shib/ShibServiceBean.java
@@ -4,17 +4,11 @@
 import com.google.gson.JsonElement;
 import com.google.gson.JsonParser;
 import edu.harvard.iq.dataverse.authorization.AuthTestDataServiceBean;
-import edu.harvard.iq.dataverse.authorization.AuthenticationRequest;
-import edu.harvard.iq.dataverse.authorization.AuthenticationResponse;
 import edu.harvard.iq.dataverse.authorization.AuthenticationServiceBean;
-import edu.harvard.iq.dataverse.authorization.exceptions.AuthenticationFailedException;
-import edu.harvard.iq.dataverse.authorization.providers.builtin.BuiltinAuthenticationProvider;
 import edu.harvard.iq.dataverse.authorization.providers.builtin.BuiltinUser;
 import edu.harvard.iq.dataverse.authorization.providers.builtin.BuiltinUserServiceBean;
-import edu.harvard.iq.dataverse.authorization.providers.builtin.PasswordEncryption;
 import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser;
 import edu.harvard.iq.dataverse.settings.SettingsServiceBean;
-import edu.harvard.iq.dataverse.util.BundleUtil;
 import edu.harvard.iq.dataverse.util.SystemConfig;
 import java.io.IOException;
 import java.io.InputStream;
@@ -25,11 +19,11 @@
 import java.util.Map;
 import java.util.UUID;
 import java.util.logging.Logger;
-import javax.ejb.EJB;
-import javax.ejb.EJBException;
-import javax.ejb.Stateless;
-import javax.inject.Named;
-import javax.servlet.http.HttpServletRequest;
+import jakarta.ejb.EJB;
+import jakarta.ejb.EJBException;
+import jakarta.ejb.Stateless;
+import jakarta.inject.Named;
+import jakarta.servlet.http.HttpServletRequest;
 
 @Named
 @Stateless
diff --git a/src/main/java/edu/harvard/iq/dataverse/authorization/providers/shib/ShibUtil.java b/src/main/java/edu/harvard/iq/dataverse/authorization/providers/shib/ShibUtil.java
index f8b30710656..fff135e0dec 100644
--- a/src/main/java/edu/harvard/iq/dataverse/authorization/providers/shib/ShibUtil.java
+++ b/src/main/java/edu/harvard/iq/dataverse/authorization/providers/shib/ShibUtil.java
@@ -12,7 +12,7 @@
 import java.util.Map;
 import java.util.UUID;
 import java.util.logging.Logger;
-import javax.servlet.http.HttpServletRequest;
+import jakarta.servlet.http.HttpServletRequest;
 
 public class ShibUtil {
 
diff --git a/src/main/java/edu/harvard/iq/dataverse/authorization/users/ApiToken.java b/src/main/java/edu/harvard/iq/dataverse/authorization/users/ApiToken.java
index fc7ed8a9060..0de7d7754a1 100644
--- a/src/main/java/edu/harvard/iq/dataverse/authorization/users/ApiToken.java
+++ b/src/main/java/edu/harvard/iq/dataverse/authorization/users/ApiToken.java
@@ -2,18 +2,18 @@
 
 import java.io.Serializable;
 import java.sql.Timestamp;
-import javax.persistence.Column;
-import javax.persistence.Entity;
-import javax.persistence.GeneratedValue;
-import javax.persistence.GenerationType;
-import javax.persistence.Id;
-import javax.persistence.Index;
-import javax.persistence.JoinColumn;
-import javax.persistence.ManyToOne;
-import javax.persistence.NamedQueries;
-import javax.persistence.NamedQuery;
-import javax.persistence.Table;
-import javax.validation.constraints.NotNull;
+import jakarta.persistence.Column;
+import jakarta.persistence.Entity;
+import jakarta.persistence.GeneratedValue;
+import jakarta.persistence.GenerationType;
+import jakarta.persistence.Id;
+import jakarta.persistence.Index;
+import jakarta.persistence.JoinColumn;
+import jakarta.persistence.ManyToOne;
+import jakarta.persistence.NamedQueries;
+import jakarta.persistence.NamedQuery;
+import jakarta.persistence.Table;
+import jakarta.validation.constraints.NotNull;
 
 @Entity
 @NamedQueries({
diff --git a/src/main/java/edu/harvard/iq/dataverse/authorization/users/AuthenticatedUser.java b/src/main/java/edu/harvard/iq/dataverse/authorization/users/AuthenticatedUser.java
index 9fdfce2f1a7..89429b912f6 100644
--- a/src/main/java/edu/harvard/iq/dataverse/authorization/users/AuthenticatedUser.java
+++ b/src/main/java/edu/harvard/iq/dataverse/authorization/users/AuthenticatedUser.java
@@ -23,22 +23,22 @@
 import java.util.Objects;
 import java.util.Set;
 
-import javax.json.Json;
-import javax.json.JsonObjectBuilder;
-import javax.persistence.CascadeType;
-import javax.persistence.Column;
-import javax.persistence.Entity;
-import javax.persistence.GeneratedValue;
-import javax.persistence.GenerationType;
-import javax.persistence.Id;
-import javax.persistence.NamedQueries;
-import javax.persistence.NamedQuery;
-import javax.persistence.OneToMany;
-import javax.persistence.OneToOne;
-import javax.persistence.PostLoad;
-import javax.persistence.PrePersist;
-import javax.persistence.Transient;
-import javax.validation.constraints.NotNull;
+import jakarta.json.Json;
+import jakarta.json.JsonObjectBuilder;
+import jakarta.persistence.CascadeType;
+import jakarta.persistence.Column;
+import jakarta.persistence.Entity;
+import jakarta.persistence.GeneratedValue;
+import jakarta.persistence.GenerationType;
+import jakarta.persistence.Id;
+import jakarta.persistence.NamedQueries;
+import jakarta.persistence.NamedQuery;
+import jakarta.persistence.OneToMany;
+import jakarta.persistence.OneToOne;
+import jakarta.persistence.PostLoad;
+import jakarta.persistence.PrePersist;
+import jakarta.persistence.Transient;
+import jakarta.validation.constraints.NotNull;
 import org.hibernate.validator.constraints.NotBlank;
 
 /**
diff --git a/src/main/java/edu/harvard/iq/dataverse/batch/entities/JobExecutionEntity.java b/src/main/java/edu/harvard/iq/dataverse/batch/entities/JobExecutionEntity.java
index be2167fa4d5..debece131d3 100644
--- a/src/main/java/edu/harvard/iq/dataverse/batch/entities/JobExecutionEntity.java
+++ b/src/main/java/edu/harvard/iq/dataverse/batch/entities/JobExecutionEntity.java
@@ -1,10 +1,10 @@
 package edu.harvard.iq.dataverse.batch.entities;
 
-import javax.batch.operations.JobOperator;
-import javax.batch.runtime.BatchRuntime;
-import javax.batch.runtime.BatchStatus;
-import javax.batch.runtime.JobExecution;
-import javax.batch.runtime.StepExecution;
+import jakarta.batch.operations.JobOperator;
+import jakarta.batch.runtime.BatchRuntime;
+import jakarta.batch.runtime.BatchStatus;
+import jakarta.batch.runtime.JobExecution;
+import jakarta.batch.runtime.StepExecution;
 import java.util.ArrayList;
 import java.util.Date;
 import java.util.LinkedHashMap;
diff --git a/src/main/java/edu/harvard/iq/dataverse/batch/entities/StepExecutionEntity.java b/src/main/java/edu/harvard/iq/dataverse/batch/entities/StepExecutionEntity.java
index 65ed3f32e1b..ba20386ed07 100644
--- a/src/main/java/edu/harvard/iq/dataverse/batch/entities/StepExecutionEntity.java
+++ b/src/main/java/edu/harvard/iq/dataverse/batch/entities/StepExecutionEntity.java
@@ -1,8 +1,8 @@
 package edu.harvard.iq.dataverse.batch.entities;
 
-import javax.batch.runtime.BatchStatus;
-import javax.batch.runtime.Metric;
-import javax.batch.runtime.StepExecution;
+import jakarta.batch.runtime.BatchStatus;
+import jakarta.batch.runtime.Metric;
+import jakarta.batch.runtime.StepExecution;
 import java.util.Date;
 import java.util.HashMap;
 import java.util.Map;
diff --git a/src/main/java/edu/harvard/iq/dataverse/batch/jobs/importer/filesystem/FileRecordJobListener.java b/src/main/java/edu/harvard/iq/dataverse/batch/jobs/importer/filesystem/FileRecordJobListener.java
index a5ba9a00bd2..593a5cbfdc3 100644
--- a/src/main/java/edu/harvard/iq/dataverse/batch/jobs/importer/filesystem/FileRecordJobListener.java
+++ b/src/main/java/edu/harvard/iq/dataverse/batch/jobs/importer/filesystem/FileRecordJobListener.java
@@ -40,22 +40,22 @@
 import edu.harvard.iq.dataverse.engine.command.DataverseRequest;
 import edu.harvard.iq.dataverse.engine.command.impl.UpdateDatasetVersionCommand;
 
-import javax.batch.api.BatchProperty;
-import javax.batch.api.chunk.listener.ItemReadListener;
-import javax.batch.api.listener.JobListener;
-import javax.batch.api.listener.StepListener;
-import javax.batch.operations.JobOperator;
-import javax.batch.runtime.BatchRuntime;
-import javax.batch.runtime.BatchStatus;
-import javax.batch.runtime.JobExecution;
-import javax.batch.runtime.StepExecution;
-import javax.batch.runtime.context.JobContext;
-import javax.batch.runtime.context.StepContext;
-import javax.ejb.EJB;
-import javax.enterprise.context.Dependent;
-import javax.inject.Inject;
-import javax.inject.Named;
-import javax.servlet.http.HttpServletRequest;
+import jakarta.batch.api.BatchProperty;
+import jakarta.batch.api.chunk.listener.ItemReadListener;
+import jakarta.batch.api.listener.JobListener;
+import jakarta.batch.api.listener.StepListener;
+import jakarta.batch.operations.JobOperator;
+import jakarta.batch.runtime.BatchRuntime;
+import jakarta.batch.runtime.BatchStatus;
+import jakarta.batch.runtime.JobExecution;
+import jakarta.batch.runtime.StepExecution;
+import jakarta.batch.runtime.context.JobContext;
+import jakarta.batch.runtime.context.StepContext;
+import jakarta.ejb.EJB;
+import jakarta.enterprise.context.Dependent;
+import jakarta.inject.Inject;
+import jakarta.inject.Named;
+import jakarta.servlet.http.HttpServletRequest;
 
 import edu.harvard.iq.dataverse.settings.JvmSettings;
 import org.apache.commons.io.IOUtils;
@@ -74,8 +74,8 @@
 import java.util.logging.Handler;
 import java.util.logging.Level;
 import java.util.logging.Logger;
-import javax.batch.operations.JobSecurityException;
-import javax.batch.operations.NoSuchJobExecutionException;
+import jakarta.batch.operations.JobSecurityException;
+import jakarta.batch.operations.NoSuchJobExecutionException;
 
 @Named
 @Dependent
diff --git a/src/main/java/edu/harvard/iq/dataverse/batch/jobs/importer/filesystem/FileRecordProcessor.java b/src/main/java/edu/harvard/iq/dataverse/batch/jobs/importer/filesystem/FileRecordProcessor.java
index af7caf32a7c..e5db80b9aa6 100644
--- a/src/main/java/edu/harvard/iq/dataverse/batch/jobs/importer/filesystem/FileRecordProcessor.java
+++ b/src/main/java/edu/harvard/iq/dataverse/batch/jobs/importer/filesystem/FileRecordProcessor.java
@@ -25,15 +25,15 @@
 import edu.harvard.iq.dataverse.DatasetServiceBean;
 import edu.harvard.iq.dataverse.DatasetVersion;
 
-import javax.annotation.PostConstruct;
-import javax.batch.api.chunk.ItemProcessor;
-import javax.batch.operations.JobOperator;
-import javax.batch.runtime.BatchRuntime;
-import javax.batch.runtime.context.JobContext;
-import javax.ejb.EJB;
-import javax.enterprise.context.Dependent;
-import javax.inject.Inject;
-import javax.inject.Named;
+import jakarta.annotation.PostConstruct;
+import jakarta.batch.api.chunk.ItemProcessor;
+import jakarta.batch.operations.JobOperator;
+import jakarta.batch.runtime.BatchRuntime;
+import jakarta.batch.runtime.context.JobContext;
+import jakarta.ejb.EJB;
+import jakarta.enterprise.context.Dependent;
+import jakarta.inject.Inject;
+import jakarta.inject.Named;
 import java.io.File;
 import java.util.Properties;
 import java.util.logging.Level;
diff --git a/src/main/java/edu/harvard/iq/dataverse/batch/jobs/importer/filesystem/FileRecordReader.java b/src/main/java/edu/harvard/iq/dataverse/batch/jobs/importer/filesystem/FileRecordReader.java
index a4f8ffd2378..fb702c21df2 100644
--- a/src/main/java/edu/harvard/iq/dataverse/batch/jobs/importer/filesystem/FileRecordReader.java
+++ b/src/main/java/edu/harvard/iq/dataverse/batch/jobs/importer/filesystem/FileRecordReader.java
@@ -28,17 +28,17 @@
 import org.apache.commons.io.filefilter.NotFileFilter;
 import org.apache.commons.io.filefilter.WildcardFileFilter;
 
-import javax.annotation.PostConstruct;
-import javax.batch.api.BatchProperty;
-import javax.batch.api.chunk.AbstractItemReader;
-import javax.batch.operations.JobOperator;
-import javax.batch.runtime.BatchRuntime;
-import javax.batch.runtime.context.JobContext;
-import javax.batch.runtime.context.StepContext;
-import javax.ejb.EJB;
-import javax.enterprise.context.Dependent;
-import javax.inject.Inject;
-import javax.inject.Named;
+import jakarta.annotation.PostConstruct;
+import jakarta.batch.api.BatchProperty;
+import jakarta.batch.api.chunk.AbstractItemReader;
+import jakarta.batch.operations.JobOperator;
+import jakarta.batch.runtime.BatchRuntime;
+import jakarta.batch.runtime.context.JobContext;
+import jakarta.batch.runtime.context.StepContext;
+import jakarta.ejb.EJB;
+import jakarta.enterprise.context.Dependent;
+import jakarta.inject.Inject;
+import jakarta.inject.Named;
 import java.io.File;
 import java.io.FileFilter;
 import java.io.Serializable;
diff --git a/src/main/java/edu/harvard/iq/dataverse/batch/jobs/importer/filesystem/FileRecordWriter.java b/src/main/java/edu/harvard/iq/dataverse/batch/jobs/importer/filesystem/FileRecordWriter.java
index 195ed57bd43..ba34a3d1ed1 100644
--- a/src/main/java/edu/harvard/iq/dataverse/batch/jobs/importer/filesystem/FileRecordWriter.java
+++ b/src/main/java/edu/harvard/iq/dataverse/batch/jobs/importer/filesystem/FileRecordWriter.java
@@ -36,17 +36,17 @@
 import edu.harvard.iq.dataverse.settings.SettingsServiceBean;
 import edu.harvard.iq.dataverse.util.FileUtil;
 
-import javax.annotation.PostConstruct;
-import javax.batch.api.BatchProperty;
-import javax.batch.api.chunk.AbstractItemWriter;
-import javax.batch.operations.JobOperator;
-import javax.batch.runtime.BatchRuntime;
-import javax.batch.runtime.context.JobContext;
-import javax.batch.runtime.context.StepContext;
-import javax.ejb.EJB;
-import javax.enterprise.context.Dependent;
-import javax.inject.Inject;
-import javax.inject.Named;
+import jakarta.annotation.PostConstruct;
+import jakarta.batch.api.BatchProperty;
+import jakarta.batch.api.chunk.AbstractItemWriter;
+import jakarta.batch.operations.JobOperator;
+import jakarta.batch.runtime.BatchRuntime;
+import jakarta.batch.runtime.context.JobContext;
+import jakarta.batch.runtime.context.StepContext;
+import jakarta.ejb.EJB;
+import jakarta.enterprise.context.Dependent;
+import jakarta.inject.Inject;
+import jakarta.inject.Named;
 import java.io.File;
 import java.io.Serializable;
 import java.sql.Timestamp;
@@ -57,7 +57,7 @@
 import java.util.Properties;
 import java.util.logging.Level;
 import java.util.logging.Logger;
-import javax.servlet.http.HttpServletRequest;
+import jakarta.servlet.http.HttpServletRequest;
 import edu.harvard.iq.dataverse.GlobalIdServiceBean;
 
 @Named
diff --git a/src/main/java/edu/harvard/iq/dataverse/batch/util/LoggingUtil.java b/src/main/java/edu/harvard/iq/dataverse/batch/util/LoggingUtil.java
index a2f76ca953d..19d1112ba54 100644
--- a/src/main/java/edu/harvard/iq/dataverse/batch/util/LoggingUtil.java
+++ b/src/main/java/edu/harvard/iq/dataverse/batch/util/LoggingUtil.java
@@ -25,7 +25,7 @@
 import edu.harvard.iq.dataverse.engine.command.Command;
 import org.apache.commons.io.FileUtils;
 
-import javax.batch.runtime.JobExecution;
+import jakarta.batch.runtime.JobExecution;
 import java.io.File;
 import java.io.IOException;
 import java.text.SimpleDateFormat;
diff --git a/src/main/java/edu/harvard/iq/dataverse/branding/BrandingUtil.java b/src/main/java/edu/harvard/iq/dataverse/branding/BrandingUtil.java
index 3cb071fe03f..c230229abf9 100644
--- a/src/main/java/edu/harvard/iq/dataverse/branding/BrandingUtil.java
+++ b/src/main/java/edu/harvard/iq/dataverse/branding/BrandingUtil.java
@@ -6,7 +6,7 @@
 import java.util.Arrays;
 import java.util.logging.Logger;
 
-import javax.mail.internet.InternetAddress;
+import jakarta.mail.internet.InternetAddress;
 
 public class BrandingUtil {
 
diff --git a/src/main/java/edu/harvard/iq/dataverse/branding/BrandingUtilHelper.java b/src/main/java/edu/harvard/iq/dataverse/branding/BrandingUtilHelper.java
index 274970f8b8e..7729ab4763e 100644
--- a/src/main/java/edu/harvard/iq/dataverse/branding/BrandingUtilHelper.java
+++ b/src/main/java/edu/harvard/iq/dataverse/branding/BrandingUtilHelper.java
@@ -1,9 +1,9 @@
 package edu.harvard.iq.dataverse.branding;
 
-import javax.annotation.PostConstruct;
-import javax.ejb.EJB;
-import javax.ejb.Singleton;
-import javax.ejb.Startup;
+import jakarta.annotation.PostConstruct;
+import jakarta.ejb.EJB;
+import jakarta.ejb.Singleton;
+import jakarta.ejb.Startup;
 
 import edu.harvard.iq.dataverse.DataverseServiceBean;
 import edu.harvard.iq.dataverse.settings.SettingsServiceBean;
diff --git a/src/main/java/edu/harvard/iq/dataverse/confirmemail/ConfirmEmailData.java b/src/main/java/edu/harvard/iq/dataverse/confirmemail/ConfirmEmailData.java
index c05750c13e6..0ad9ab59f4b 100644
--- a/src/main/java/edu/harvard/iq/dataverse/confirmemail/ConfirmEmailData.java
+++ b/src/main/java/edu/harvard/iq/dataverse/confirmemail/ConfirmEmailData.java
@@ -5,17 +5,17 @@
 import java.sql.Timestamp;
 import java.util.Date;
 import java.util.UUID;
-import javax.persistence.Column;
-import javax.persistence.Entity;
-import javax.persistence.GeneratedValue;
-import javax.persistence.GenerationType;
-import javax.persistence.Id;
-import javax.persistence.Index;
-import javax.persistence.JoinColumn;
-import javax.persistence.NamedQueries;
-import javax.persistence.NamedQuery;
-import javax.persistence.OneToOne;
-import javax.persistence.Table;
+import jakarta.persistence.Column;
+import jakarta.persistence.Entity;
+import jakarta.persistence.GeneratedValue;
+import jakarta.persistence.GenerationType;
+import jakarta.persistence.Id;
+import jakarta.persistence.Index;
+import jakarta.persistence.JoinColumn;
+import jakarta.persistence.NamedQueries;
+import jakarta.persistence.NamedQuery;
+import jakarta.persistence.OneToOne;
+import jakarta.persistence.Table;
 
 /**
  *
diff --git a/src/main/java/edu/harvard/iq/dataverse/confirmemail/ConfirmEmailPage.java b/src/main/java/edu/harvard/iq/dataverse/confirmemail/ConfirmEmailPage.java
index 07aea0d5011..b76e3db1379 100644
--- a/src/main/java/edu/harvard/iq/dataverse/confirmemail/ConfirmEmailPage.java
+++ b/src/main/java/edu/harvard/iq/dataverse/confirmemail/ConfirmEmailPage.java
@@ -7,10 +7,10 @@
 import edu.harvard.iq.dataverse.util.JsfHelper;
 import java.util.Arrays;
 import java.util.logging.Logger;
-import javax.ejb.EJB;
-import javax.faces.view.ViewScoped;
-import javax.inject.Inject;
-import javax.inject.Named;
+import jakarta.ejb.EJB;
+import jakarta.faces.view.ViewScoped;
+import jakarta.inject.Inject;
+import jakarta.inject.Named;
 
 /**
  *
diff --git a/src/main/java/edu/harvard/iq/dataverse/confirmemail/ConfirmEmailServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/confirmemail/ConfirmEmailServiceBean.java
index e1053c3a93f..a54fd6bb0c1 100644
--- a/src/main/java/edu/harvard/iq/dataverse/confirmemail/ConfirmEmailServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/confirmemail/ConfirmEmailServiceBean.java
@@ -1,6 +1,5 @@
 package edu.harvard.iq.dataverse.confirmemail;
 
-import edu.harvard.iq.dataverse.Dataverse;
 import edu.harvard.iq.dataverse.DataverseServiceBean;
 import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser;
 import edu.harvard.iq.dataverse.authorization.AuthenticationServiceBean;
@@ -16,13 +15,13 @@
 import java.util.List;
 import java.util.logging.Level;
 import java.util.logging.Logger;
-import javax.ejb.EJB;
-import javax.ejb.Stateless;
-import javax.persistence.EntityManager;
-import javax.persistence.NoResultException;
-import javax.persistence.NonUniqueResultException;
-import javax.persistence.PersistenceContext;
-import javax.persistence.TypedQuery;
+import jakarta.ejb.EJB;
+import jakarta.ejb.Stateless;
+import jakarta.persistence.EntityManager;
+import jakarta.persistence.NoResultException;
+import jakarta.persistence.NonUniqueResultException;
+import jakarta.persistence.PersistenceContext;
+import jakarta.persistence.TypedQuery;
 
 /**
  *
diff --git a/src/main/java/edu/harvard/iq/dataverse/dashboard/DashboardDatamovePage.java b/src/main/java/edu/harvard/iq/dataverse/dashboard/DashboardDatamovePage.java
index 54e3114a0ae..6fc80312bf5 100644
--- a/src/main/java/edu/harvard/iq/dataverse/dashboard/DashboardDatamovePage.java
+++ b/src/main/java/edu/harvard/iq/dataverse/dashboard/DashboardDatamovePage.java
@@ -8,7 +8,6 @@
 import edu.harvard.iq.dataverse.EjbDataverseEngine;
 import edu.harvard.iq.dataverse.PermissionsWrapper;
 import edu.harvard.iq.dataverse.SettingsWrapper;
-import edu.harvard.iq.dataverse.authorization.groups.impl.ipaddress.ip.IpAddress;
 import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser;
 import edu.harvard.iq.dataverse.engine.command.DataverseRequest;
 import edu.harvard.iq.dataverse.engine.command.exception.CommandException;
@@ -22,16 +21,16 @@
 import java.util.List;
 import java.util.logging.Level;
 import java.util.logging.Logger;
-import javax.ejb.EJB;
-import javax.faces.application.FacesMessage;
-import javax.faces.component.UIInput;
-import javax.faces.context.FacesContext;
-import javax.faces.view.ViewScoped;
-import javax.inject.Inject;
-import javax.inject.Named;
-import javax.persistence.EntityManager;
-import javax.persistence.PersistenceContext;
-import javax.servlet.http.HttpServletRequest;
+import jakarta.ejb.EJB;
+import jakarta.faces.application.FacesMessage;
+import jakarta.faces.component.UIInput;
+import jakarta.faces.context.FacesContext;
+import jakarta.faces.view.ViewScoped;
+import jakarta.inject.Inject;
+import jakarta.inject.Named;
+import jakarta.persistence.EntityManager;
+import jakarta.persistence.PersistenceContext;
+import jakarta.servlet.http.HttpServletRequest;
 
 @ViewScoped
 @Named("DashboardDatamovePage")
diff --git a/src/main/java/edu/harvard/iq/dataverse/dashboard/DashboardUsersPage.java b/src/main/java/edu/harvard/iq/dataverse/dashboard/DashboardUsersPage.java
index 5b5a21e21bf..477e4c0fdd6 100644
--- a/src/main/java/edu/harvard/iq/dataverse/dashboard/DashboardUsersPage.java
+++ b/src/main/java/edu/harvard/iq/dataverse/dashboard/DashboardUsersPage.java
@@ -24,10 +24,10 @@
 import java.util.Map;
 import java.util.logging.Level;
 import java.util.logging.Logger;
-import javax.ejb.EJB;
-import javax.faces.view.ViewScoped;
-import javax.inject.Inject;
-import javax.inject.Named;
+import jakarta.ejb.EJB;
+import jakarta.faces.view.ViewScoped;
+import jakarta.inject.Inject;
+import jakarta.inject.Named;
 
 @ViewScoped
 @Named("DashboardUsersPage")
diff --git a/src/main/java/edu/harvard/iq/dataverse/dataaccess/FileAccessIO.java b/src/main/java/edu/harvard/iq/dataverse/dataaccess/FileAccessIO.java
index 8ee3f0cf53c..d95df1567bd 100644
--- a/src/main/java/edu/harvard/iq/dataverse/dataaccess/FileAccessIO.java
+++ b/src/main/java/edu/harvard/iq/dataverse/dataaccess/FileAccessIO.java
@@ -565,21 +565,26 @@ private String getDatasetDirectory() throws IOException {
         if (isDirectAccess()) {
             throw new IOException("No DvObject defined in the Data Access Object");
         }
-
-        Path datasetDirectoryPath=null;
         
+        String authorityForFS = null;
+        String identifierForFS = null;
         if (dvObject instanceof Dataset) {
-            datasetDirectoryPath = Paths.get(this.getDataset().getAuthorityForFileStorage(), this.getDataset().getIdentifierForFileStorage());
+            authorityForFS = this.getDataset().getAuthorityForFileStorage();
+            identifierForFS = this.getDataset().getIdentifierForFileStorage();
         } else if (dvObject instanceof DataFile) {
-            datasetDirectoryPath = Paths.get(this.getDataFile().getOwner().getAuthorityForFileStorage(), this.getDataFile().getOwner().getIdentifierForFileStorage());
+            authorityForFS = this.getDataFile().getOwner().getAuthorityForFileStorage();
+            identifierForFS = this.getDataFile().getOwner().getIdentifierForFileStorage();
         } else if (dvObject instanceof Dataverse) {
             throw new IOException("FileAccessIO: Dataverses are not a supported dvObject");
         }
-            
-        if (datasetDirectoryPath == null) {
+        
+        if (authorityForFS == null || identifierForFS == null) {
             throw new IOException("Could not determine the filesystem directory of the parent dataset.");
         }
-        String datasetDirectory = Paths.get(getFilesRootDirectory(), datasetDirectoryPath.toString()).toString();
+        
+        // Determine the final directory tree. As of JDK 16, the first component of the path MUST be non-null
+        // (we check for that via the setting), but also the others make no sense if they are null.
+        String datasetDirectory = Paths.get(getFilesRootDirectory(), authorityForFS, identifierForFS).toString();
 
         if (dvObject.getStorageIdentifier() == null || dvObject.getStorageIdentifier().isEmpty()) {
             throw new IOException("Data Access: No local storage identifier defined for this datafile.");
diff --git a/src/main/java/edu/harvard/iq/dataverse/dataaccess/S3AccessIO.java b/src/main/java/edu/harvard/iq/dataverse/dataaccess/S3AccessIO.java
index f6bca84941e..822ada0b83e 100644
--- a/src/main/java/edu/harvard/iq/dataverse/dataaccess/S3AccessIO.java
+++ b/src/main/java/edu/harvard/iq/dataverse/dataaccess/S3AccessIO.java
@@ -69,9 +69,9 @@
 import org.eclipse.microprofile.config.Config;
 import org.eclipse.microprofile.config.ConfigProvider;
 
-import javax.json.Json;
-import javax.json.JsonObjectBuilder;
-import javax.validation.constraints.NotNull;
+import jakarta.json.Json;
+import jakarta.json.JsonObjectBuilder;
+import jakarta.validation.constraints.NotNull;
 
 /**
  *
diff --git a/src/main/java/edu/harvard/iq/dataverse/datacapturemodule/DataCaptureModuleServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/datacapturemodule/DataCaptureModuleServiceBean.java
index c33b4e0fc71..bf5d4a0d6ab 100644
--- a/src/main/java/edu/harvard/iq/dataverse/datacapturemodule/DataCaptureModuleServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/datacapturemodule/DataCaptureModuleServiceBean.java
@@ -6,8 +6,8 @@
 import com.mashape.unirest.http.Unirest;
 import com.mashape.unirest.http.exceptions.UnirestException;
 import java.io.Serializable;
-import javax.ejb.Stateless;
-import javax.inject.Named;
+import jakarta.ejb.Stateless;
+import jakarta.inject.Named;
 
 /**
  * This class contains all the methods that have external runtime dependencies
diff --git a/src/main/java/edu/harvard/iq/dataverse/datacapturemodule/DataCaptureModuleUtil.java b/src/main/java/edu/harvard/iq/dataverse/datacapturemodule/DataCaptureModuleUtil.java
index 1aa384d205e..460e4727afc 100644
--- a/src/main/java/edu/harvard/iq/dataverse/datacapturemodule/DataCaptureModuleUtil.java
+++ b/src/main/java/edu/harvard/iq/dataverse/datacapturemodule/DataCaptureModuleUtil.java
@@ -8,9 +8,9 @@
 import edu.harvard.iq.dataverse.util.SystemConfig;
 import java.util.Arrays;
 import java.util.logging.Logger;
-import javax.json.Json;
-import javax.json.JsonObject;
-import javax.json.JsonObjectBuilder;
+import jakarta.json.Json;
+import jakarta.json.JsonObject;
+import jakarta.json.JsonObjectBuilder;
 
 public class DataCaptureModuleUtil {
 
diff --git a/src/main/java/edu/harvard/iq/dataverse/datasetutility/AddReplaceFileHelper.java b/src/main/java/edu/harvard/iq/dataverse/datasetutility/AddReplaceFileHelper.java
index 4c2510b6ccb..98d5afc47e6 100644
--- a/src/main/java/edu/harvard/iq/dataverse/datasetutility/AddReplaceFileHelper.java
+++ b/src/main/java/edu/harvard/iq/dataverse/datasetutility/AddReplaceFileHelper.java
@@ -17,7 +17,6 @@
 import edu.harvard.iq.dataverse.PermissionServiceBean;
 import edu.harvard.iq.dataverse.api.ApiConstants;
 import edu.harvard.iq.dataverse.api.Util;
-import edu.harvard.iq.dataverse.api.Files;
 import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser;
 import edu.harvard.iq.dataverse.authorization.users.User;
 import edu.harvard.iq.dataverse.dataaccess.DataAccess;
@@ -48,23 +47,22 @@
 import java.util.logging.Level;
 import java.util.logging.Logger;
 
-import javax.ejb.Asynchronous;
-import javax.ejb.EJBException;
-import javax.json.Json;
-import javax.json.JsonArrayBuilder;
-import javax.json.JsonNumber;
-import javax.json.JsonObject;
-import javax.json.JsonArray;
-import javax.json.JsonObjectBuilder;
-import javax.validation.ConstraintViolation;
-import javax.ws.rs.core.MediaType;
-import javax.ws.rs.core.Response;
+import jakarta.ejb.Asynchronous;
+import jakarta.ejb.EJBException;
+import jakarta.json.Json;
+import jakarta.json.JsonArrayBuilder;
+import jakarta.json.JsonNumber;
+import jakarta.json.JsonObject;
+import jakarta.json.JsonArray;
+import jakarta.json.JsonObjectBuilder;
+import jakarta.validation.ConstraintViolation;
+import jakarta.ws.rs.core.MediaType;
+import jakarta.ws.rs.core.Response;
 
 import edu.harvard.iq.dataverse.util.json.NullSafeJsonBuilder;
 import org.apache.commons.io.IOUtils;
-import org.ocpsoft.common.util.Strings;
 
-import static javax.ws.rs.core.Response.Status.BAD_REQUEST;
+import static jakarta.ws.rs.core.Response.Status.BAD_REQUEST;
 
 /**
  *  Methods to add or replace a single file.
@@ -1217,7 +1215,7 @@ private boolean step_030_createNewFilesViaIngest(){
             initialFileList = result.getDataFiles();
 
         } catch (IOException ex) {
-            if (!Strings.isNullOrEmpty(ex.getMessage())) {
+            if (ex.getMessage() != null && !ex.getMessage().isEmpty()) {
                 this.addErrorSevere(getBundleErr("ingest_create_file_err") + " " + ex.getMessage());
             } else {
                 this.addErrorSevere(getBundleErr("ingest_create_file_err"));
@@ -2152,7 +2150,7 @@ public Response addFiles(String jsonData, Dataset dataset, User authUser) {
 
             }
         }
-        catch ( javax.json.stream.JsonParsingException ex) {
+        catch ( jakarta.json.stream.JsonParsingException ex) {
             ex.printStackTrace();
             return error(BAD_REQUEST, "Json Parsing Exception :" + ex.getMessage());
         }
@@ -2321,7 +2319,7 @@ public Response replaceFiles(String jsonData, Dataset ds, User authUser) {
 
             }
         }
-        catch ( javax.json.stream.JsonParsingException ex) {
+        catch ( jakarta.json.stream.JsonParsingException ex) {
             ex.printStackTrace();
             return error(BAD_REQUEST, "Json Parsing Exception :" + ex.getMessage());
         }
diff --git a/src/main/java/edu/harvard/iq/dataverse/datavariable/CategoryMetadata.java b/src/main/java/edu/harvard/iq/dataverse/datavariable/CategoryMetadata.java
index 5e03899b790..a9b1694d842 100644
--- a/src/main/java/edu/harvard/iq/dataverse/datavariable/CategoryMetadata.java
+++ b/src/main/java/edu/harvard/iq/dataverse/datavariable/CategoryMetadata.java
@@ -1,13 +1,13 @@
 package edu.harvard.iq.dataverse.datavariable;
 
-import javax.persistence.Index;
-import javax.persistence.Entity;
-import javax.persistence.Table;
-import javax.persistence.GenerationType;
-import javax.persistence.GeneratedValue;
-import javax.persistence.Id;
-import javax.persistence.ManyToOne;
-import javax.persistence.JoinColumn;
+import jakarta.persistence.Index;
+import jakarta.persistence.Entity;
+import jakarta.persistence.Table;
+import jakarta.persistence.GenerationType;
+import jakarta.persistence.GeneratedValue;
+import jakarta.persistence.Id;
+import jakarta.persistence.ManyToOne;
+import jakarta.persistence.JoinColumn;
 
 @Entity
 @Table(indexes = {@Index(columnList="category_id"), @Index(columnList="variablemetadata_id")})
diff --git a/src/main/java/edu/harvard/iq/dataverse/datavariable/DataVariable.java b/src/main/java/edu/harvard/iq/dataverse/datavariable/DataVariable.java
index 6462f690cac..b2e9441a163 100644
--- a/src/main/java/edu/harvard/iq/dataverse/datavariable/DataVariable.java
+++ b/src/main/java/edu/harvard/iq/dataverse/datavariable/DataVariable.java
@@ -8,22 +8,22 @@
 
 import java.io.Serializable;
 import java.util.Collection;
-import javax.persistence.CascadeType;
-import javax.persistence.Entity;
-import javax.persistence.GeneratedValue;
-import javax.persistence.GenerationType;
-import javax.persistence.Id;
-import javax.persistence.JoinColumn;
-import javax.persistence.ManyToOne;
-import javax.persistence.OneToMany;
+import jakarta.persistence.CascadeType;
+import jakarta.persistence.Entity;
+import jakarta.persistence.GeneratedValue;
+import jakarta.persistence.GenerationType;
+import jakarta.persistence.Id;
+import jakarta.persistence.JoinColumn;
+import jakarta.persistence.ManyToOne;
+import jakarta.persistence.OneToMany;
 import org.hibernate.validator.constraints.NotBlank;
 import edu.harvard.iq.dataverse.DataTable;
 import java.util.ArrayList;
 import java.util.List;
-import javax.persistence.Column;
-import javax.persistence.Index;
-import javax.persistence.OrderBy;
-import javax.persistence.Table;
+import jakarta.persistence.Column;
+import jakarta.persistence.Index;
+import jakarta.persistence.OrderBy;
+import jakarta.persistence.Table;
 
 /**
  *
diff --git a/src/main/java/edu/harvard/iq/dataverse/datavariable/SummaryStatistic.java b/src/main/java/edu/harvard/iq/dataverse/datavariable/SummaryStatistic.java
index bf81aff3e2b..6896ef360ce 100644
--- a/src/main/java/edu/harvard/iq/dataverse/datavariable/SummaryStatistic.java
+++ b/src/main/java/edu/harvard/iq/dataverse/datavariable/SummaryStatistic.java
@@ -7,14 +7,14 @@
 package edu.harvard.iq.dataverse.datavariable;
 
 import java.io.Serializable;
-import javax.persistence.Entity;
-import javax.persistence.GeneratedValue;
-import javax.persistence.GenerationType;
-import javax.persistence.Id;
-import javax.persistence.Index;
-import javax.persistence.JoinColumn;
-import javax.persistence.ManyToOne;
-import javax.persistence.Table;
+import jakarta.persistence.Entity;
+import jakarta.persistence.GeneratedValue;
+import jakarta.persistence.GenerationType;
+import jakarta.persistence.Id;
+import jakarta.persistence.Index;
+import jakarta.persistence.JoinColumn;
+import jakarta.persistence.ManyToOne;
+import jakarta.persistence.Table;
 
 /*
  *
diff --git a/src/main/java/edu/harvard/iq/dataverse/datavariable/VarGroup.java b/src/main/java/edu/harvard/iq/dataverse/datavariable/VarGroup.java
index 242110e333f..b52c76930d7 100644
--- a/src/main/java/edu/harvard/iq/dataverse/datavariable/VarGroup.java
+++ b/src/main/java/edu/harvard/iq/dataverse/datavariable/VarGroup.java
@@ -1,14 +1,14 @@
 package edu.harvard.iq.dataverse.datavariable;
 
-import javax.persistence.Entity;
-import javax.persistence.Table;
-import javax.persistence.Index;
-import javax.persistence.Id;
-import javax.persistence.ManyToOne;
-import javax.persistence.JoinColumn;
-import javax.persistence.GeneratedValue;
-import javax.persistence.GenerationType;
-import javax.persistence.Column;
+import jakarta.persistence.Entity;
+import jakarta.persistence.Table;
+import jakarta.persistence.Index;
+import jakarta.persistence.Id;
+import jakarta.persistence.ManyToOne;
+import jakarta.persistence.JoinColumn;
+import jakarta.persistence.GeneratedValue;
+import jakarta.persistence.GenerationType;
+import jakarta.persistence.Column;
 import java.util.HashSet;
 import java.util.Set;
 
diff --git a/src/main/java/edu/harvard/iq/dataverse/datavariable/VariableCategory.java b/src/main/java/edu/harvard/iq/dataverse/datavariable/VariableCategory.java
index 6a3e702a561..5ccef82b5d1 100644
--- a/src/main/java/edu/harvard/iq/dataverse/datavariable/VariableCategory.java
+++ b/src/main/java/edu/harvard/iq/dataverse/datavariable/VariableCategory.java
@@ -8,16 +8,16 @@
 
 import java.io.Serializable;
 import java.util.List;
-import javax.persistence.Entity;
-import javax.persistence.GeneratedValue;
-import javax.persistence.GenerationType;
-import javax.persistence.Id;
-import javax.persistence.JoinColumn;
-import javax.persistence.ManyToOne;
+import jakarta.persistence.Entity;
+import jakarta.persistence.GeneratedValue;
+import jakarta.persistence.GenerationType;
+import jakarta.persistence.Id;
+import jakarta.persistence.JoinColumn;
+import jakarta.persistence.ManyToOne;
 
 import edu.harvard.iq.dataverse.util.AlphaNumericComparator;
-import javax.persistence.Index;
-import javax.persistence.Table;
+import jakarta.persistence.Index;
+import jakarta.persistence.Table;
 
 /**
  *
diff --git a/src/main/java/edu/harvard/iq/dataverse/datavariable/VariableMetadata.java b/src/main/java/edu/harvard/iq/dataverse/datavariable/VariableMetadata.java
index c18355c9979..29e821c28a4 100644
--- a/src/main/java/edu/harvard/iq/dataverse/datavariable/VariableMetadata.java
+++ b/src/main/java/edu/harvard/iq/dataverse/datavariable/VariableMetadata.java
@@ -2,18 +2,18 @@
 
 import java.io.Serializable;
 
-import javax.persistence.Entity;
-import javax.persistence.Table;
-import javax.persistence.Index;
-import javax.persistence.UniqueConstraint;
-import javax.persistence.Id;
-import javax.persistence.ManyToOne;
-import javax.persistence.JoinColumn;
-import javax.persistence.GeneratedValue;
-import javax.persistence.GenerationType;
-import javax.persistence.Column;
-import javax.persistence.OneToMany;
-import javax.persistence.CascadeType;
+import jakarta.persistence.Entity;
+import jakarta.persistence.Table;
+import jakarta.persistence.Index;
+import jakarta.persistence.UniqueConstraint;
+import jakarta.persistence.Id;
+import jakarta.persistence.ManyToOne;
+import jakarta.persistence.JoinColumn;
+import jakarta.persistence.GeneratedValue;
+import jakarta.persistence.GenerationType;
+import jakarta.persistence.Column;
+import jakarta.persistence.OneToMany;
+import jakarta.persistence.CascadeType;
 
 import java.util.Collection;
 import java.util.ArrayList;
diff --git a/src/main/java/edu/harvard/iq/dataverse/datavariable/VariableRange.java b/src/main/java/edu/harvard/iq/dataverse/datavariable/VariableRange.java
index 17098e6af54..eb04eac846b 100644
--- a/src/main/java/edu/harvard/iq/dataverse/datavariable/VariableRange.java
+++ b/src/main/java/edu/harvard/iq/dataverse/datavariable/VariableRange.java
@@ -7,14 +7,14 @@
 package edu.harvard.iq.dataverse.datavariable;
 
 import java.io.Serializable;
-import javax.persistence.Entity;
-import javax.persistence.GeneratedValue;
-import javax.persistence.GenerationType;
-import javax.persistence.Id;
-import javax.persistence.Index;
-import javax.persistence.JoinColumn;
-import javax.persistence.ManyToOne;
-import javax.persistence.Table;
+import jakarta.persistence.Entity;
+import jakarta.persistence.GeneratedValue;
+import jakarta.persistence.GenerationType;
+import jakarta.persistence.Id;
+import jakarta.persistence.Index;
+import jakarta.persistence.JoinColumn;
+import jakarta.persistence.ManyToOne;
+import jakarta.persistence.Table;
 
 /**
  *
diff --git a/src/main/java/edu/harvard/iq/dataverse/datavariable/VariableRangeItem.java b/src/main/java/edu/harvard/iq/dataverse/datavariable/VariableRangeItem.java
index 81db4225515..d5f99f7e016 100644
--- a/src/main/java/edu/harvard/iq/dataverse/datavariable/VariableRangeItem.java
+++ b/src/main/java/edu/harvard/iq/dataverse/datavariable/VariableRangeItem.java
@@ -8,14 +8,14 @@
 
 import java.io.Serializable;
 import java.math.BigDecimal;
-import javax.persistence.Entity;
-import javax.persistence.GeneratedValue;
-import javax.persistence.GenerationType;
-import javax.persistence.Id;
-import javax.persistence.Index;
-import javax.persistence.JoinColumn;
-import javax.persistence.ManyToOne;
-import javax.persistence.Table;
+import jakarta.persistence.Entity;
+import jakarta.persistence.GeneratedValue;
+import jakarta.persistence.GenerationType;
+import jakarta.persistence.Id;
+import jakarta.persistence.Index;
+import jakarta.persistence.JoinColumn;
+import jakarta.persistence.ManyToOne;
+import jakarta.persistence.Table;
 
 
 /**
diff --git a/src/main/java/edu/harvard/iq/dataverse/datavariable/VariableServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/datavariable/VariableServiceBean.java
index 8287d1c7041..9fb4a3fd34c 100644
--- a/src/main/java/edu/harvard/iq/dataverse/datavariable/VariableServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/datavariable/VariableServiceBean.java
@@ -7,12 +7,12 @@
 package edu.harvard.iq.dataverse.datavariable;
 
 import java.util.List;
-import java.util.logging.Logger;
-import javax.ejb.Stateless;
-import javax.inject.Named;
-import javax.persistence.EntityManager;
-import javax.persistence.PersistenceContext;
-import javax.persistence.TypedQuery;
+
+import jakarta.ejb.Stateless;
+import jakarta.inject.Named;
+import jakarta.persistence.EntityManager;
+import jakarta.persistence.PersistenceContext;
+import jakarta.persistence.TypedQuery;
 
 /**
  *
@@ -105,7 +105,7 @@ public VariableFormatType findVariableFormatTypeByName(String name) {
         VariableFormatType type = null;
         try {
             type = (VariableFormatType)query.getSingleResult();
-        } catch (javax.persistence.NoResultException e) {
+        } catch (jakarta.persistence.NoResultException e) {
             // DO nothing, just return null.
         }
         return type;
@@ -116,7 +116,7 @@ public VariableIntervalType findVariableIntervalTypeByName(String name) {
         VariableIntervalType type = null;
         try {
             type=(VariableIntervalType)em.createQuery(query).getSingleResult();
-        } catch (javax.persistence.NoResultException e) {
+        } catch (jakarta.persistence.NoResultException e) {
             // DO nothing, just return null.
         }
         return type;
@@ -127,7 +127,7 @@ public SummaryStatisticType findSummaryStatisticTypeByName(String name) {
         SummaryStatisticType type = null;
         try {
             type = (SummaryStatisticType) em.createQuery(query).getSingleResult();
-        } catch (javax.persistence.NoResultException e) {
+        } catch (jakarta.persistence.NoResultException e) {
             // DO nothing, just return null.
         }
         return type;
diff --git a/src/main/java/edu/harvard/iq/dataverse/dataverse/DataverseUtil.java b/src/main/java/edu/harvard/iq/dataverse/dataverse/DataverseUtil.java
index 52a9501aefb..7964c56835e 100644
--- a/src/main/java/edu/harvard/iq/dataverse/dataverse/DataverseUtil.java
+++ b/src/main/java/edu/harvard/iq/dataverse/dataverse/DataverseUtil.java
@@ -16,7 +16,7 @@
 import java.util.Map;
 import java.util.logging.Logger;
 
-import javax.ws.rs.BadRequestException;
+import jakarta.ws.rs.BadRequestException;
 
 import opennlp.tools.util.StringUtil;
 import org.apache.commons.io.FileUtils;
diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/CommandContext.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/CommandContext.java
index ef5b9dba407..55a375acb6c 100644
--- a/src/main/java/edu/harvard/iq/dataverse/engine/command/CommandContext.java
+++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/CommandContext.java
@@ -42,7 +42,7 @@
 import edu.harvard.iq.dataverse.util.SystemConfig;
 import edu.harvard.iq.dataverse.workflow.WorkflowServiceBean;
 import java.util.Stack;
-import javax.persistence.EntityManager;
+import jakarta.persistence.EntityManager;
 
 /**
  * An interface for accessing Dataverse's resources, user info etc. Used by the
diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/DataverseRequest.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/DataverseRequest.java
index 05be37802f0..d792b616a0c 100644
--- a/src/main/java/edu/harvard/iq/dataverse/engine/command/DataverseRequest.java
+++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/DataverseRequest.java
@@ -1,7 +1,6 @@
 package edu.harvard.iq.dataverse.engine.command;
 
 import edu.harvard.iq.dataverse.api.AbstractApiBean;
-import edu.harvard.iq.dataverse.api.batchjob.FileRecordJobResource;
 import edu.harvard.iq.dataverse.authorization.groups.impl.ipaddress.ip.IpAddress;
 import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser;
 import edu.harvard.iq.dataverse.authorization.users.User;
@@ -11,7 +10,7 @@
 import java.util.logging.Level;
 import java.util.logging.Logger;
 
-import javax.servlet.http.HttpServletRequest;
+import jakarta.servlet.http.HttpServletRequest;
 
 /**
  * 
diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/AbstractDatasetCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/AbstractDatasetCommand.java
index 36fe3ea7bc1..6061461306d 100644
--- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/AbstractDatasetCommand.java
+++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/AbstractDatasetCommand.java
@@ -14,7 +14,6 @@
 import edu.harvard.iq.dataverse.engine.command.exception.CommandExecutionException;
 import edu.harvard.iq.dataverse.engine.command.exception.IllegalCommandException;
 import edu.harvard.iq.dataverse.util.BundleUtil;
-import edu.harvard.iq.dataverse.util.json.JsonUtil;
 
 import java.sql.Timestamp;
 import java.util.Date;
@@ -23,14 +22,10 @@
 import java.util.logging.Logger;
 import static java.util.stream.Collectors.joining;
 
-import javax.json.JsonObject;
-import javax.servlet.http.HttpServletRequest;
-import javax.validation.ConstraintViolation;
+import jakarta.validation.ConstraintViolation;
 import edu.harvard.iq.dataverse.GlobalIdServiceBean;
 import edu.harvard.iq.dataverse.MetadataBlock;
-import edu.harvard.iq.dataverse.MetadataBlockServiceBean;
 import edu.harvard.iq.dataverse.TermsOfUseAndAccess;
-import edu.harvard.iq.dataverse.pidproviders.FakePidProviderServiceBean;
 import edu.harvard.iq.dataverse.settings.JvmSettings;
 
 /**
diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/AddRoleAssigneesToExplicitGroupCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/AddRoleAssigneesToExplicitGroupCommand.java
index 8ba1d181609..59c5d970b09 100644
--- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/AddRoleAssigneesToExplicitGroupCommand.java
+++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/AddRoleAssigneesToExplicitGroupCommand.java
@@ -16,7 +16,7 @@
 import java.util.Set;
 import java.util.logging.Level;
 import java.util.logging.Logger;
-import javax.ejb.EJBException;
+import jakarta.ejb.EJBException;
 
 /**
  *
diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/ChangeUserIdentifierCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/ChangeUserIdentifierCommand.java
index 4a5998aea00..94aff3e3f5d 100644
--- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/ChangeUserIdentifierCommand.java
+++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/ChangeUserIdentifierCommand.java
@@ -18,11 +18,11 @@
 import edu.harvard.iq.dataverse.engine.command.exception.IllegalCommandException;
 import java.util.List;
 import java.util.Set;
-import javax.validation.ConstraintViolation;
-import javax.validation.Validation;
-import javax.validation.Validator;
-import javax.validation.ValidatorFactory;
-import javax.ws.rs.core.Response;
+import jakarta.validation.ConstraintViolation;
+import jakarta.validation.Validation;
+import jakarta.validation.Validator;
+import jakarta.validation.ValidatorFactory;
+import jakarta.ws.rs.core.Response;
 
 /**
  *
diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateRoleCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateRoleCommand.java
index cb9b0a3c774..8cffcd3d821 100644
--- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateRoleCommand.java
+++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateRoleCommand.java
@@ -1,7 +1,6 @@
 package edu.harvard.iq.dataverse.engine.command.impl;
 
 import edu.harvard.iq.dataverse.Dataverse;
-import edu.harvard.iq.dataverse.api.AbstractApiBean;
 import edu.harvard.iq.dataverse.authorization.DataverseRole;
 import edu.harvard.iq.dataverse.authorization.Permission;
 import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser;
@@ -13,7 +12,7 @@
 import edu.harvard.iq.dataverse.engine.command.exception.CommandException;
 import edu.harvard.iq.dataverse.engine.command.exception.IllegalCommandException;
 import edu.harvard.iq.dataverse.util.BundleUtil;
-import javax.persistence.NoResultException;
+import jakarta.persistence.NoResultException;
 
 /**
  * Create a new role in a dataverse.
diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateSavedSearchCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateSavedSearchCommand.java
index 147e1870566..7a549a51dd5 100644
--- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateSavedSearchCommand.java
+++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateSavedSearchCommand.java
@@ -12,7 +12,7 @@
 import edu.harvard.iq.dataverse.search.savedsearch.SavedSearchServiceBean;
 import java.util.logging.Level;
 import java.util.logging.Logger;
-import javax.json.JsonObjectBuilder;
+import jakarta.json.JsonObjectBuilder;
 
 @RequiredPermissions(Permission.PublishDataverse)
 public class CreateSavedSearchCommand extends AbstractCommand<SavedSearch> {
diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/DRSSubmitToArchiveCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/DRSSubmitToArchiveCommand.java
index f23033f09fa..594d4fe25ba 100644
--- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/DRSSubmitToArchiveCommand.java
+++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/DRSSubmitToArchiveCommand.java
@@ -34,10 +34,10 @@
 import java.util.Set;
 import java.util.logging.Logger;
 
-import javax.json.Json;
-import javax.json.JsonObject;
-import javax.json.JsonObjectBuilder;
-import javax.json.JsonValue;
+import jakarta.json.Json;
+import jakarta.json.JsonObject;
+import jakarta.json.JsonObjectBuilder;
+import jakarta.json.JsonValue;
 import javax.net.ssl.SSLContext;
 
 import org.apache.commons.codec.digest.DigestUtils;
diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/DuraCloudSubmitToArchiveCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/DuraCloudSubmitToArchiveCommand.java
index 2ca73af3b3c..d6d7b49d172 100644
--- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/DuraCloudSubmitToArchiveCommand.java
+++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/DuraCloudSubmitToArchiveCommand.java
@@ -21,8 +21,8 @@
 import java.util.Map;
 import java.util.logging.Logger;
 
-import javax.json.Json;
-import javax.json.JsonObjectBuilder;
+import jakarta.json.Json;
+import jakarta.json.JsonObjectBuilder;
 
 import org.apache.commons.codec.binary.Hex;
 import org.duracloud.client.ContentStore;
diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/FinalizeDatasetPublicationCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/FinalizeDatasetPublicationCommand.java
index 253c761f0c3..f5e70209744 100644
--- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/FinalizeDatasetPublicationCommand.java
+++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/FinalizeDatasetPublicationCommand.java
@@ -37,8 +37,8 @@
 import java.util.concurrent.Future;
 import org.apache.solr.client.solrj.SolrServerException;
 
-import javax.ejb.EJB;
-import javax.inject.Inject;
+import jakarta.ejb.EJB;
+import jakarta.inject.Inject;
 
 
 /**
diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetProvJsonCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetProvJsonCommand.java
index 23f08aadd3e..2de2adff099 100644
--- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetProvJsonCommand.java
+++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetProvJsonCommand.java
@@ -12,9 +12,9 @@
 import java.io.IOException;
 import java.io.InputStream;
 import java.util.logging.Logger;
-import javax.json.Json;
-import javax.json.JsonObject;
-import javax.json.JsonReader;
+import jakarta.json.Json;
+import jakarta.json.JsonObject;
+import jakarta.json.JsonReader;
 
 @RequiredPermissions(Permission.EditDataset)
 public class GetProvJsonCommand extends AbstractCommand<JsonObject> {
diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetUserTracesCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetUserTracesCommand.java
index f3324ba6f2e..e41d70d9804 100644
--- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetUserTracesCommand.java
+++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetUserTracesCommand.java
@@ -17,14 +17,14 @@
 import edu.harvard.iq.dataverse.engine.command.exception.PermissionException;
 import edu.harvard.iq.dataverse.search.savedsearch.SavedSearch;
 import edu.harvard.iq.dataverse.util.json.NullSafeJsonBuilder;
-import java.math.BigDecimal;
+
 import java.util.List;
 import java.util.Set;
 import java.util.logging.Logger;
 
-import javax.json.Json;
-import javax.json.JsonArrayBuilder;
-import javax.json.JsonObjectBuilder;
+import jakarta.json.Json;
+import jakarta.json.JsonArrayBuilder;
+import jakarta.json.JsonObjectBuilder;
 
 // Superuser-only enforced below.
 @RequiredPermissions({})
diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GoogleCloudSubmitToArchiveCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GoogleCloudSubmitToArchiveCommand.java
index da2701a41e7..512987866d4 100644
--- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GoogleCloudSubmitToArchiveCommand.java
+++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GoogleCloudSubmitToArchiveCommand.java
@@ -19,8 +19,8 @@
 import edu.harvard.iq.dataverse.workflow.step.WorkflowStepResult;
 import org.apache.commons.codec.binary.Hex;
 
-import javax.json.Json;
-import javax.json.JsonObjectBuilder;
+import jakarta.json.Json;
+import jakarta.json.JsonObjectBuilder;
 import java.io.File;
 import java.io.FileInputStream;
 import java.io.IOException;
diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/ImportFromFileSystemCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/ImportFromFileSystemCommand.java
index 5f31ea756eb..c03c77d42fd 100644
--- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/ImportFromFileSystemCommand.java
+++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/ImportFromFileSystemCommand.java
@@ -14,12 +14,12 @@
 import edu.harvard.iq.dataverse.engine.command.exception.IllegalCommandException;
 import edu.harvard.iq.dataverse.settings.JvmSettings;
 
-import javax.batch.operations.JobOperator;
-import javax.batch.operations.JobSecurityException;
-import javax.batch.operations.JobStartException;
-import javax.batch.runtime.BatchRuntime;
-import javax.json.JsonObject;
-import javax.json.JsonObjectBuilder;
+import jakarta.batch.operations.JobOperator;
+import jakarta.batch.operations.JobSecurityException;
+import jakarta.batch.operations.JobStartException;
+import jakarta.batch.runtime.BatchRuntime;
+import jakarta.json.JsonObject;
+import jakarta.json.JsonObjectBuilder;
 import java.io.File;
 import java.util.Properties;
 import java.util.logging.Level;
diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/LinkDataverseCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/LinkDataverseCommand.java
index 1c63a1a3c4f..55fe96556a5 100644
--- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/LinkDataverseCommand.java
+++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/LinkDataverseCommand.java
@@ -23,8 +23,8 @@
 import java.util.Arrays;
 import java.util.Collections;
 import java.util.Date;
-import java.util.concurrent.Future;
-import javax.ws.rs.core.Response;
+
+import jakarta.ws.rs.core.Response;
 import org.apache.solr.client.solrj.SolrServerException;
 
 /**
diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/LocalSubmitToArchiveCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/LocalSubmitToArchiveCommand.java
index c7e91b2967b..d2f061b6e70 100644
--- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/LocalSubmitToArchiveCommand.java
+++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/LocalSubmitToArchiveCommand.java
@@ -17,8 +17,8 @@
 import java.util.Map;
 import java.util.logging.Logger;
 
-import javax.json.Json;
-import javax.json.JsonObjectBuilder;
+import jakarta.json.Json;
+import jakarta.json.JsonObjectBuilder;
 
 import java.io.File;
 import java.io.FileOutputStream;
diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/RedetectFileTypeCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/RedetectFileTypeCommand.java
index bdb6ceffd6d..b9346a43af8 100644
--- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/RedetectFileTypeCommand.java
+++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/RedetectFileTypeCommand.java
@@ -20,7 +20,7 @@
 import java.nio.channels.FileChannel;
 import java.nio.channels.ReadableByteChannel;
 import java.util.logging.Logger;
-import javax.ejb.EJBException;
+import jakarta.ejb.EJBException;
 
 @RequiredPermissions(Permission.EditDataset)
 public class RedetectFileTypeCommand extends AbstractCommand<DataFile> {
diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/S3SubmitToArchiveCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/S3SubmitToArchiveCommand.java
index f24d956e9d7..f02edd54b86 100644
--- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/S3SubmitToArchiveCommand.java
+++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/S3SubmitToArchiveCommand.java
@@ -20,9 +20,9 @@
 import java.util.Map;
 import java.util.logging.Logger;
 
-import javax.json.Json;
-import javax.json.JsonObject;
-import javax.json.JsonObjectBuilder;
+import jakarta.json.Json;
+import jakarta.json.JsonObject;
+import jakarta.json.JsonObjectBuilder;
 
 import org.eclipse.microprofile.config.Config;
 import org.eclipse.microprofile.config.ConfigProvider;
diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UningestFileCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UningestFileCommand.java
index 29180f65e36..f2b89746160 100644
--- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UningestFileCommand.java
+++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UningestFileCommand.java
@@ -22,11 +22,11 @@
 import edu.harvard.iq.dataverse.engine.command.exception.IllegalCommandException;
 import edu.harvard.iq.dataverse.engine.command.exception.PermissionException;
 import edu.harvard.iq.dataverse.util.FileUtil;
-import edu.harvard.iq.dataverse.util.StringUtil;
+
 import java.io.IOException;
 import java.util.Collections;
 import java.util.logging.Logger;
-import javax.persistence.Query;
+import jakarta.persistence.Query;
 
 /**
  *
diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateDatasetVersionCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateDatasetVersionCommand.java
index 5f4ac24c1d9..7591bebe796 100644
--- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateDatasetVersionCommand.java
+++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateDatasetVersionCommand.java
@@ -17,7 +17,7 @@
 import java.util.logging.Level;
 import java.util.logging.Logger;
 
-import javax.validation.ConstraintViolationException;
+import jakarta.validation.ConstraintViolationException;
 
 /**
  *
diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateDataverseCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateDataverseCommand.java
index 218b0ea89d9..56c76f04c05 100644
--- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateDataverseCommand.java
+++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateDataverseCommand.java
@@ -6,7 +6,7 @@
 import edu.harvard.iq.dataverse.Dataverse.DataverseType;
 import edu.harvard.iq.dataverse.DataverseFieldTypeInputLevel;
 import edu.harvard.iq.dataverse.authorization.Permission;
-import edu.harvard.iq.dataverse.batch.util.LoggingUtil;
+
 import static edu.harvard.iq.dataverse.dataverse.DataverseUtil.validateDataverseMetadataExternally;
 import edu.harvard.iq.dataverse.engine.command.AbstractCommand;
 import edu.harvard.iq.dataverse.engine.command.CommandContext;
@@ -14,14 +14,11 @@
 import edu.harvard.iq.dataverse.engine.command.RequiredPermissions;
 import edu.harvard.iq.dataverse.engine.command.exception.CommandException;
 import edu.harvard.iq.dataverse.engine.command.exception.IllegalCommandException;
-import edu.harvard.iq.dataverse.search.IndexResponse;
-import java.io.IOException;
+
 import java.util.ArrayList;
 import java.util.List;
-import java.util.concurrent.Future;
 import java.util.logging.Logger;
-import javax.persistence.TypedQuery;
-import org.apache.solr.client.solrj.SolrServerException;
+import jakarta.persistence.TypedQuery;
 
 /**
  * Update an existing dataverse.
diff --git a/src/main/java/edu/harvard/iq/dataverse/export/DCTermsExporter.java b/src/main/java/edu/harvard/iq/dataverse/export/DCTermsExporter.java
index 8df17e71ae6..f82c0d9ad3d 100644
--- a/src/main/java/edu/harvard/iq/dataverse/export/DCTermsExporter.java
+++ b/src/main/java/edu/harvard/iq/dataverse/export/DCTermsExporter.java
@@ -12,7 +12,7 @@
 import java.util.Locale;
 import java.util.Optional;
 
-import javax.json.JsonObject;
+import jakarta.json.JsonObject;
 import javax.xml.stream.XMLStreamException;
 
 /**
diff --git a/src/main/java/edu/harvard/iq/dataverse/export/DDIExportServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/export/DDIExportServiceBean.java
index 59ff539af37..5119b4b96c7 100644
--- a/src/main/java/edu/harvard/iq/dataverse/export/DDIExportServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/export/DDIExportServiceBean.java
@@ -33,14 +33,14 @@
 import java.util.logging.Logger;
 import java.util.logging.Level;
 import java.io.OutputStream;
-import javax.ejb.Stateless;
-import javax.inject.Named;
-import javax.ejb.EJB;
-import javax.ejb.EJBException;
-import javax.ejb.TransactionAttribute;
-import javax.ejb.TransactionAttributeType;
-import javax.persistence.EntityManager;
-import javax.persistence.PersistenceContext;
+import jakarta.ejb.Stateless;
+import jakarta.inject.Named;
+import jakarta.ejb.EJB;
+import jakarta.ejb.EJBException;
+import jakarta.ejb.TransactionAttribute;
+import jakarta.ejb.TransactionAttributeType;
+import jakarta.persistence.EntityManager;
+import jakarta.persistence.PersistenceContext;
 import javax.xml.stream.XMLStreamWriter;
 import javax.xml.stream.XMLStreamException;
 import javax.xml.stream.XMLOutputFactory;
diff --git a/src/main/java/edu/harvard/iq/dataverse/export/DDIExporter.java b/src/main/java/edu/harvard/iq/dataverse/export/DDIExporter.java
index 66a9ea6c665..d48ce3a537d 100644
--- a/src/main/java/edu/harvard/iq/dataverse/export/DDIExporter.java
+++ b/src/main/java/edu/harvard/iq/dataverse/export/DDIExporter.java
@@ -2,7 +2,6 @@
 package edu.harvard.iq.dataverse.export;
 
 import com.google.auto.service.AutoService;
-import edu.harvard.iq.dataverse.DatasetVersion;
 import edu.harvard.iq.dataverse.export.ddi.DdiExportUtil;
 import io.gdcc.spi.export.ExportDataProvider;
 import io.gdcc.spi.export.ExportException;
@@ -13,7 +12,7 @@
 import java.util.Locale;
 import java.util.Optional;
 
-import javax.json.JsonObject;
+import jakarta.json.JsonObject;
 import javax.xml.stream.XMLStreamException;
 import javax.xml.stream.XMLStreamWriter;
 import javax.xml.stream.XMLOutputFactory;
diff --git a/src/main/java/edu/harvard/iq/dataverse/export/DublinCoreExporter.java b/src/main/java/edu/harvard/iq/dataverse/export/DublinCoreExporter.java
index 04b7892d737..0fa32dd4bfa 100644
--- a/src/main/java/edu/harvard/iq/dataverse/export/DublinCoreExporter.java
+++ b/src/main/java/edu/harvard/iq/dataverse/export/DublinCoreExporter.java
@@ -2,7 +2,6 @@
 package edu.harvard.iq.dataverse.export;
 
 import com.google.auto.service.AutoService;
-import edu.harvard.iq.dataverse.DatasetVersion;
 import edu.harvard.iq.dataverse.export.dublincore.DublinCoreExportUtil;
 import io.gdcc.spi.export.ExportDataProvider;
 import io.gdcc.spi.export.ExportException;
@@ -13,7 +12,7 @@
 import java.util.Locale;
 import java.util.Optional;
 
-import javax.json.JsonObject;
+import jakarta.json.JsonObject;
 import javax.xml.stream.XMLStreamException;
 
 /**
diff --git a/src/main/java/edu/harvard/iq/dataverse/export/ExportService.java b/src/main/java/edu/harvard/iq/dataverse/export/ExportService.java
index eed84a19a66..8342e7df92a 100644
--- a/src/main/java/edu/harvard/iq/dataverse/export/ExportService.java
+++ b/src/main/java/edu/harvard/iq/dataverse/export/ExportService.java
@@ -9,7 +9,6 @@
 import static edu.harvard.iq.dataverse.dataaccess.DataAccess.getStorageIO;
 import edu.harvard.iq.dataverse.dataaccess.DataAccessOption;
 import edu.harvard.iq.dataverse.dataaccess.StorageIO;
-import io.gdcc.spi.export.ExportDataProvider;
 import io.gdcc.spi.export.ExportException;
 import io.gdcc.spi.export.Exporter;
 import io.gdcc.spi.export.XMLExporter;
@@ -47,7 +46,7 @@
 import java.util.Set;
 import java.util.logging.Level;
 import java.util.logging.Logger;
-import javax.ws.rs.core.MediaType;
+import jakarta.ws.rs.core.MediaType;
 
 import org.apache.commons.io.IOUtils;
 
diff --git a/src/main/java/edu/harvard/iq/dataverse/export/HtmlCodeBookExporter.java b/src/main/java/edu/harvard/iq/dataverse/export/HtmlCodeBookExporter.java
index 1b449060b17..9d0b107299e 100644
--- a/src/main/java/edu/harvard/iq/dataverse/export/HtmlCodeBookExporter.java
+++ b/src/main/java/edu/harvard/iq/dataverse/export/HtmlCodeBookExporter.java
@@ -1,23 +1,18 @@
 package edu.harvard.iq.dataverse.export;
 
 import com.google.auto.service.AutoService;
-import edu.harvard.iq.dataverse.Dataset;
-import edu.harvard.iq.dataverse.DatasetVersion;
 import edu.harvard.iq.dataverse.export.ddi.DdiExportUtil;
 import io.gdcc.spi.export.ExportDataProvider;
 import io.gdcc.spi.export.ExportException;
 import io.gdcc.spi.export.Exporter;
 import edu.harvard.iq.dataverse.util.BundleUtil;
 
-import javax.json.JsonObject;
-import javax.ws.rs.core.MediaType;
+import jakarta.json.JsonObject;
+import jakarta.ws.rs.core.MediaType;
 import javax.xml.stream.XMLStreamException;
-import java.io.File;
 import java.io.IOException;
 import java.io.InputStream;
 import java.io.OutputStream;
-import java.nio.file.Path;
-import java.nio.file.Paths;
 import java.util.Locale;
 import java.util.Optional;
 
diff --git a/src/main/java/edu/harvard/iq/dataverse/export/InternalExportDataProvider.java b/src/main/java/edu/harvard/iq/dataverse/export/InternalExportDataProvider.java
index 7c76c4972a8..a7967f6ccb6 100644
--- a/src/main/java/edu/harvard/iq/dataverse/export/InternalExportDataProvider.java
+++ b/src/main/java/edu/harvard/iq/dataverse/export/InternalExportDataProvider.java
@@ -3,11 +3,11 @@
 import java.io.InputStream;
 import java.util.Optional;
 
-import javax.json.Json;
-import javax.json.JsonArray;
-import javax.json.JsonArrayBuilder;
-import javax.json.JsonObject;
-import javax.json.JsonObjectBuilder;
+import jakarta.json.Json;
+import jakarta.json.JsonArray;
+import jakarta.json.JsonArrayBuilder;
+import jakarta.json.JsonObject;
+import jakarta.json.JsonObjectBuilder;
 
 import edu.harvard.iq.dataverse.DOIDataCiteRegisterService;
 import edu.harvard.iq.dataverse.DataCitation;
diff --git a/src/main/java/edu/harvard/iq/dataverse/export/JSONExporter.java b/src/main/java/edu/harvard/iq/dataverse/export/JSONExporter.java
index 30347017071..a54e61c7c1e 100644
--- a/src/main/java/edu/harvard/iq/dataverse/export/JSONExporter.java
+++ b/src/main/java/edu/harvard/iq/dataverse/export/JSONExporter.java
@@ -2,7 +2,6 @@
 package edu.harvard.iq.dataverse.export;
 
 import com.google.auto.service.AutoService;
-import edu.harvard.iq.dataverse.DatasetVersion;
 import io.gdcc.spi.export.ExportDataProvider;
 import io.gdcc.spi.export.ExportException;
 import io.gdcc.spi.export.Exporter;
@@ -11,8 +10,8 @@
 import java.util.Locale;
 import java.util.Optional;
 
-import javax.json.JsonObject;
-import javax.ws.rs.core.MediaType;
+import jakarta.json.JsonObject;
+import jakarta.ws.rs.core.MediaType;
 
 
 /**
diff --git a/src/main/java/edu/harvard/iq/dataverse/export/OAI_DDIExporter.java b/src/main/java/edu/harvard/iq/dataverse/export/OAI_DDIExporter.java
index de97a88d1fa..0b4121c6025 100644
--- a/src/main/java/edu/harvard/iq/dataverse/export/OAI_DDIExporter.java
+++ b/src/main/java/edu/harvard/iq/dataverse/export/OAI_DDIExporter.java
@@ -2,7 +2,6 @@
 package edu.harvard.iq.dataverse.export;
 
 import com.google.auto.service.AutoService;
-import edu.harvard.iq.dataverse.DatasetVersion;
 import edu.harvard.iq.dataverse.export.ddi.DdiExportUtil;
 import io.gdcc.spi.export.ExportDataProvider;
 import io.gdcc.spi.export.ExportException;
@@ -13,7 +12,7 @@
 import java.util.Locale;
 import java.util.Optional;
 
-import javax.json.JsonObject;
+import jakarta.json.JsonObject;
 import javax.xml.stream.XMLStreamException;
 
 /**
diff --git a/src/main/java/edu/harvard/iq/dataverse/export/OAI_OREExporter.java b/src/main/java/edu/harvard/iq/dataverse/export/OAI_OREExporter.java
index 06fee32e220..feec4403570 100644
--- a/src/main/java/edu/harvard/iq/dataverse/export/OAI_OREExporter.java
+++ b/src/main/java/edu/harvard/iq/dataverse/export/OAI_OREExporter.java
@@ -1,21 +1,18 @@
 package edu.harvard.iq.dataverse.export;
 
 import com.google.auto.service.AutoService;
-import edu.harvard.iq.dataverse.DatasetVersion;
 import io.gdcc.spi.export.ExportDataProvider;
 import io.gdcc.spi.export.ExportException;
 import io.gdcc.spi.export.Exporter;
-import edu.harvard.iq.dataverse.settings.SettingsServiceBean;
 import edu.harvard.iq.dataverse.util.BundleUtil;
-import edu.harvard.iq.dataverse.util.bagit.OREMap;
+
 import java.io.OutputStream;
 import java.util.Locale;
 import java.util.Optional;
-import java.util.ResourceBundle;
 import java.util.logging.Logger;
 
-import javax.json.JsonObject;
-import javax.ws.rs.core.MediaType;
+import jakarta.json.JsonObject;
+import jakarta.ws.rs.core.MediaType;
 
 @AutoService(Exporter.class)
 public class OAI_OREExporter implements Exporter {
diff --git a/src/main/java/edu/harvard/iq/dataverse/export/OpenAireExporter.java b/src/main/java/edu/harvard/iq/dataverse/export/OpenAireExporter.java
index 03b8ebfa76f..8bd4ae6a042 100644
--- a/src/main/java/edu/harvard/iq/dataverse/export/OpenAireExporter.java
+++ b/src/main/java/edu/harvard/iq/dataverse/export/OpenAireExporter.java
@@ -3,12 +3,11 @@
 import java.io.OutputStream;
 import java.util.Locale;
 
-import javax.json.JsonObject;
+import jakarta.json.JsonObject;
 import javax.xml.stream.XMLStreamException;
 
 import com.google.auto.service.AutoService;
 
-import edu.harvard.iq.dataverse.DatasetVersion;
 import edu.harvard.iq.dataverse.export.openaire.OpenAireExportUtil;
 import io.gdcc.spi.export.ExportDataProvider;
 import io.gdcc.spi.export.ExportException;
diff --git a/src/main/java/edu/harvard/iq/dataverse/export/SchemaDotOrgExporter.java b/src/main/java/edu/harvard/iq/dataverse/export/SchemaDotOrgExporter.java
index f3be1e83d2d..5428715b905 100644
--- a/src/main/java/edu/harvard/iq/dataverse/export/SchemaDotOrgExporter.java
+++ b/src/main/java/edu/harvard/iq/dataverse/export/SchemaDotOrgExporter.java
@@ -9,7 +9,7 @@
 import java.io.OutputStream;
 import java.util.Locale;
 import java.util.logging.Logger;
-import javax.ws.rs.core.MediaType;
+import jakarta.ws.rs.core.MediaType;
 
 /**
  * Schema.org JSON-LD is used by Google Dataset Search and other services to
diff --git a/src/main/java/edu/harvard/iq/dataverse/export/ddi/DdiExportUtil.java b/src/main/java/edu/harvard/iq/dataverse/export/ddi/DdiExportUtil.java
index 846267e0a2c..24449e8010a 100644
--- a/src/main/java/edu/harvard/iq/dataverse/export/ddi/DdiExportUtil.java
+++ b/src/main/java/edu/harvard/iq/dataverse/export/ddi/DdiExportUtil.java
@@ -3,26 +3,13 @@
 import com.google.gson.Gson;
 
 import edu.harvard.iq.dataverse.ControlledVocabularyValue;
-import edu.harvard.iq.dataverse.DataFile;
-import edu.harvard.iq.dataverse.DataTable;
 import edu.harvard.iq.dataverse.DatasetFieldConstant;
-import edu.harvard.iq.dataverse.DatasetVersion;
 import edu.harvard.iq.dataverse.DvObjectContainer;
-import edu.harvard.iq.dataverse.FileMetadata;
-import edu.harvard.iq.dataverse.GlobalId;
 import edu.harvard.iq.dataverse.api.dto.DatasetDTO;
 import edu.harvard.iq.dataverse.api.dto.DatasetVersionDTO;
 import edu.harvard.iq.dataverse.api.dto.FieldDTO;
 import edu.harvard.iq.dataverse.api.dto.FileDTO;
 import edu.harvard.iq.dataverse.api.dto.MetadataBlockDTO;
-import edu.harvard.iq.dataverse.datavariable.VariableMetadata;
-import edu.harvard.iq.dataverse.datavariable.DataVariable;
-import edu.harvard.iq.dataverse.datavariable.VariableServiceBean;
-import edu.harvard.iq.dataverse.datavariable.VariableRange;
-import edu.harvard.iq.dataverse.datavariable.SummaryStatistic;
-import edu.harvard.iq.dataverse.datavariable.VariableCategory;
-import edu.harvard.iq.dataverse.datavariable.VarGroup;
-import edu.harvard.iq.dataverse.datavariable.CategoryMetadata;
 
 import static edu.harvard.iq.dataverse.export.DDIExportServiceBean.LEVEL_FILE;
 import static edu.harvard.iq.dataverse.export.DDIExportServiceBean.NOTE_SUBJECT_TAG;
@@ -34,43 +21,36 @@
 import edu.harvard.iq.dataverse.settings.SettingsServiceBean;
 
 
-import edu.harvard.iq.dataverse.util.BundleUtil;
-import edu.harvard.iq.dataverse.util.FileUtil;
 import edu.harvard.iq.dataverse.util.SystemConfig;
-import edu.harvard.iq.dataverse.util.json.JsonPrinter;
 import edu.harvard.iq.dataverse.util.json.JsonUtil;
 import edu.harvard.iq.dataverse.util.xml.XmlPrinter;
 import java.io.ByteArrayOutputStream;
 import java.io.IOException;
 import java.io.OutputStream;
 import java.nio.file.Files;
-import java.nio.file.Path;
 import java.nio.file.Paths;
 import java.time.LocalDate;
 import java.util.*;
 import java.util.Map.Entry;
 import java.util.logging.Level;
 import java.util.logging.Logger;
-import javax.ejb.EJB;
-import javax.json.Json;
-import javax.json.JsonArray;
-import javax.json.JsonArrayBuilder;
-import javax.json.JsonObject;
-import javax.json.JsonString;
-import javax.json.JsonValue;
+import jakarta.ejb.EJB;
+import jakarta.json.Json;
+import jakarta.json.JsonArray;
+import jakarta.json.JsonArrayBuilder;
+import jakarta.json.JsonObject;
+import jakarta.json.JsonString;
+import jakarta.json.JsonValue;
 import javax.xml.stream.XMLOutputFactory;
 import javax.xml.stream.XMLStreamException;
 import javax.xml.stream.XMLStreamWriter;
 
 import javax.xml.parsers.DocumentBuilder;
 import javax.xml.parsers.DocumentBuilderFactory;
-import javax.xml.parsers.FactoryConfigurationError;
 import javax.xml.parsers.ParserConfigurationException;
 import org.xml.sax.SAXException;
-import org.xml.sax.SAXParseException;
 import org.w3c.dom.Document;
 import org.apache.commons.lang3.StringUtils;
-import org.w3c.dom.DOMException;
 
 // For write operation
 import javax.xml.transform.Transformer;
@@ -80,9 +60,7 @@
 import javax.xml.transform.dom.DOMSource;
 import javax.xml.transform.stream.StreamSource;
 import javax.xml.transform.stream.StreamResult;
-import java.io.File;
 import java.io.InputStream;
-import java.io.InputStreamReader;
 
 public class DdiExportUtil {
 
diff --git a/src/main/java/edu/harvard/iq/dataverse/export/ddi/DdiExportUtilHelper.java b/src/main/java/edu/harvard/iq/dataverse/export/ddi/DdiExportUtilHelper.java
index 149c6791a7e..d0cd8a4cae7 100644
--- a/src/main/java/edu/harvard/iq/dataverse/export/ddi/DdiExportUtilHelper.java
+++ b/src/main/java/edu/harvard/iq/dataverse/export/ddi/DdiExportUtilHelper.java
@@ -1,9 +1,9 @@
 package edu.harvard.iq.dataverse.export.ddi;
 
-import javax.annotation.PostConstruct;
-import javax.ejb.EJB;
-import javax.ejb.Singleton;
-import javax.ejb.Startup;
+import jakarta.annotation.PostConstruct;
+import jakarta.ejb.EJB;
+import jakarta.ejb.Singleton;
+import jakarta.ejb.Startup;
 
 import edu.harvard.iq.dataverse.settings.SettingsServiceBean;
 
diff --git a/src/main/java/edu/harvard/iq/dataverse/export/dublincore/DublinCoreExportUtil.java b/src/main/java/edu/harvard/iq/dataverse/export/dublincore/DublinCoreExportUtil.java
index 238cea78fb5..6b7cb844f3e 100644
--- a/src/main/java/edu/harvard/iq/dataverse/export/dublincore/DublinCoreExportUtil.java
+++ b/src/main/java/edu/harvard/iq/dataverse/export/dublincore/DublinCoreExportUtil.java
@@ -14,18 +14,16 @@
 import edu.harvard.iq.dataverse.api.dto.LicenseDTO;
 import edu.harvard.iq.dataverse.api.dto.MetadataBlockDTO;
 import edu.harvard.iq.dataverse.export.ddi.DdiExportUtil;
-import edu.harvard.iq.dataverse.license.License;
 import edu.harvard.iq.dataverse.pidproviders.PidUtil;
 import edu.harvard.iq.dataverse.util.json.JsonUtil;
-import java.io.ByteArrayOutputStream;
+
 import java.io.OutputStream;
 import java.util.HashSet;
 import java.util.Iterator;
 import java.util.List;
 import java.util.Map;
-import java.util.logging.Level;
 import java.util.logging.Logger;
-import javax.json.JsonObject;
+import jakarta.json.JsonObject;
 import javax.xml.stream.XMLOutputFactory;
 import javax.xml.stream.XMLStreamException;
 import javax.xml.stream.XMLStreamWriter;
diff --git a/src/main/java/edu/harvard/iq/dataverse/export/openaire/OpenAireExportUtil.java b/src/main/java/edu/harvard/iq/dataverse/export/openaire/OpenAireExportUtil.java
index 6e1e61ebd06..f7e75ca03fa 100644
--- a/src/main/java/edu/harvard/iq/dataverse/export/openaire/OpenAireExportUtil.java
+++ b/src/main/java/edu/harvard/iq/dataverse/export/openaire/OpenAireExportUtil.java
@@ -8,7 +8,7 @@
 import java.util.Set;
 import java.util.logging.Logger;
 
-import javax.json.JsonObject;
+import jakarta.json.JsonObject;
 import javax.xml.stream.XMLOutputFactory;
 import javax.xml.stream.XMLStreamException;
 import javax.xml.stream.XMLStreamWriter;
@@ -21,7 +21,6 @@
 import edu.harvard.iq.dataverse.DatasetFieldConstant;
 import edu.harvard.iq.dataverse.GlobalId;
 import edu.harvard.iq.dataverse.HandlenetServiceBean;
-import edu.harvard.iq.dataverse.TermsOfUseAndAccess;
 import edu.harvard.iq.dataverse.api.dto.DatasetDTO;
 import edu.harvard.iq.dataverse.api.dto.DatasetVersionDTO;
 import edu.harvard.iq.dataverse.api.dto.FieldDTO;
@@ -31,8 +30,8 @@
 import edu.harvard.iq.dataverse.util.json.JsonUtil;
 import java.util.regex.Matcher;
 import java.util.regex.Pattern;
-import javax.mail.internet.AddressException;
-import javax.mail.internet.InternetAddress;
+import jakarta.mail.internet.AddressException;
+import jakarta.mail.internet.InternetAddress;
 
 public class OpenAireExportUtil {
 
diff --git a/src/main/java/edu/harvard/iq/dataverse/externaltools/ExternalTool.java b/src/main/java/edu/harvard/iq/dataverse/externaltools/ExternalTool.java
index bea6cebc1d7..7f1f46c06cb 100644
--- a/src/main/java/edu/harvard/iq/dataverse/externaltools/ExternalTool.java
+++ b/src/main/java/edu/harvard/iq/dataverse/externaltools/ExternalTool.java
@@ -6,20 +6,20 @@
 import java.io.Serializable;
 import java.util.Arrays;
 import java.util.List;
-import java.util.logging.Logger;
-import javax.json.Json;
-import javax.json.JsonArrayBuilder;
-import javax.json.JsonObjectBuilder;
-import javax.persistence.CascadeType;
-import javax.persistence.Column;
-import javax.persistence.Entity;
-import javax.persistence.EnumType;
-import javax.persistence.Enumerated;
-import javax.persistence.GeneratedValue;
-import javax.persistence.GenerationType;
-import javax.persistence.Id;
-import javax.persistence.JoinColumn;
-import javax.persistence.OneToMany;
+
+import jakarta.json.Json;
+import jakarta.json.JsonArrayBuilder;
+import jakarta.json.JsonObjectBuilder;
+import jakarta.persistence.CascadeType;
+import jakarta.persistence.Column;
+import jakarta.persistence.Entity;
+import jakarta.persistence.EnumType;
+import jakarta.persistence.Enumerated;
+import jakarta.persistence.GeneratedValue;
+import jakarta.persistence.GenerationType;
+import jakarta.persistence.Id;
+import jakarta.persistence.JoinColumn;
+import jakarta.persistence.OneToMany;
 
 /**
  * A specification or definition for how an external tool is intended to
diff --git a/src/main/java/edu/harvard/iq/dataverse/externaltools/ExternalToolHandler.java b/src/main/java/edu/harvard/iq/dataverse/externaltools/ExternalToolHandler.java
index dac046373ba..a52679deebc 100644
--- a/src/main/java/edu/harvard/iq/dataverse/externaltools/ExternalToolHandler.java
+++ b/src/main/java/edu/harvard/iq/dataverse/externaltools/ExternalToolHandler.java
@@ -22,15 +22,15 @@
 import java.util.logging.Level;
 import java.util.logging.Logger;
 
-import javax.json.Json;
-import javax.json.JsonArray;
-import javax.json.JsonArrayBuilder;
-import javax.json.JsonNumber;
-import javax.json.JsonObject;
-import javax.json.JsonObjectBuilder;
-import javax.json.JsonString;
-import javax.json.JsonValue;
-import javax.ws.rs.HttpMethod;
+import jakarta.json.Json;
+import jakarta.json.JsonArray;
+import jakarta.json.JsonArrayBuilder;
+import jakarta.json.JsonNumber;
+import jakarta.json.JsonObject;
+import jakarta.json.JsonObjectBuilder;
+import jakarta.json.JsonString;
+import jakarta.json.JsonValue;
+import jakarta.ws.rs.HttpMethod;
 
 import org.apache.commons.codec.binary.StringUtils;
 
diff --git a/src/main/java/edu/harvard/iq/dataverse/externaltools/ExternalToolServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/externaltools/ExternalToolServiceBean.java
index 5ecf5cd755c..e13843eadfa 100644
--- a/src/main/java/edu/harvard/iq/dataverse/externaltools/ExternalToolServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/externaltools/ExternalToolServiceBean.java
@@ -6,34 +6,31 @@
 import edu.harvard.iq.dataverse.DataFileServiceBean;
 import edu.harvard.iq.dataverse.authorization.users.ApiToken;
 import edu.harvard.iq.dataverse.externaltools.ExternalTool.Type;
-import edu.harvard.iq.dataverse.util.URLTokenUtil;
 import edu.harvard.iq.dataverse.util.URLTokenUtil.ReservedWord;
 import edu.harvard.iq.dataverse.util.json.JsonUtil;
 import edu.harvard.iq.dataverse.externaltools.ExternalTool.Scope;
 
-import java.io.StringReader;
 import java.util.ArrayList;
 import java.util.List;
 import java.util.Set;
 import java.util.logging.Logger;
-import javax.ejb.Stateless;
-import javax.inject.Named;
-import javax.json.Json;
-import javax.json.JsonArray;
-import javax.json.JsonObject;
-import javax.json.JsonObjectBuilder;
-import javax.json.JsonReader;
-import javax.persistence.EntityManager;
-import javax.persistence.NoResultException;
-import javax.persistence.NonUniqueResultException;
-import javax.persistence.PersistenceContext;
-import javax.persistence.TypedQuery;
+import jakarta.ejb.Stateless;
+import jakarta.inject.Named;
+import jakarta.json.Json;
+import jakarta.json.JsonArray;
+import jakarta.json.JsonObject;
+import jakarta.json.JsonObjectBuilder;
+import jakarta.json.JsonReader;
+import jakarta.persistence.EntityManager;
+import jakarta.persistence.NoResultException;
+import jakarta.persistence.NonUniqueResultException;
+import jakarta.persistence.PersistenceContext;
+import jakarta.persistence.TypedQuery;
 
 import static edu.harvard.iq.dataverse.externaltools.ExternalTool.*;
-import java.util.stream.Collectors;
-import java.util.stream.Stream;
-import javax.ejb.EJB;
-import javax.json.JsonValue;
+
+import jakarta.ejb.EJB;
+import jakarta.json.JsonValue;
 
 @Stateless
 @Named
diff --git a/src/main/java/edu/harvard/iq/dataverse/externaltools/ExternalToolType.java b/src/main/java/edu/harvard/iq/dataverse/externaltools/ExternalToolType.java
index 3564d1871b5..fb4c0f5dc5d 100644
--- a/src/main/java/edu/harvard/iq/dataverse/externaltools/ExternalToolType.java
+++ b/src/main/java/edu/harvard/iq/dataverse/externaltools/ExternalToolType.java
@@ -1,18 +1,18 @@
 package edu.harvard.iq.dataverse.externaltools;
 
 import java.io.Serializable;
-import javax.persistence.CascadeType;
-import javax.persistence.Column;
-import javax.persistence.Entity;
-import javax.persistence.EnumType;
-import javax.persistence.Enumerated;
-import javax.persistence.GeneratedValue;
-import javax.persistence.GenerationType;
-import javax.persistence.Id;
-import javax.persistence.Index;
-import javax.persistence.JoinColumn;
-import javax.persistence.ManyToOne;
-import javax.persistence.Table;
+import jakarta.persistence.CascadeType;
+import jakarta.persistence.Column;
+import jakarta.persistence.Entity;
+import jakarta.persistence.EnumType;
+import jakarta.persistence.Enumerated;
+import jakarta.persistence.GeneratedValue;
+import jakarta.persistence.GenerationType;
+import jakarta.persistence.Id;
+import jakarta.persistence.Index;
+import jakarta.persistence.JoinColumn;
+import jakarta.persistence.ManyToOne;
+import jakarta.persistence.Table;
 
 @Entity
 @Table(indexes = {
diff --git a/src/main/java/edu/harvard/iq/dataverse/feedback/Feedback.java b/src/main/java/edu/harvard/iq/dataverse/feedback/Feedback.java
index c9acb491aa2..c1162eb8db6 100644
--- a/src/main/java/edu/harvard/iq/dataverse/feedback/Feedback.java
+++ b/src/main/java/edu/harvard/iq/dataverse/feedback/Feedback.java
@@ -1,7 +1,7 @@
 package edu.harvard.iq.dataverse.feedback;
 
-import javax.json.Json;
-import javax.json.JsonObjectBuilder;
+import jakarta.json.Json;
+import jakarta.json.JsonObjectBuilder;
 
 import edu.harvard.iq.dataverse.util.json.NullSafeJsonBuilder;
 
diff --git a/src/main/java/edu/harvard/iq/dataverse/feedback/FeedbackUtil.java b/src/main/java/edu/harvard/iq/dataverse/feedback/FeedbackUtil.java
index 750a3923806..6ae0e165141 100644
--- a/src/main/java/edu/harvard/iq/dataverse/feedback/FeedbackUtil.java
+++ b/src/main/java/edu/harvard/iq/dataverse/feedback/FeedbackUtil.java
@@ -17,8 +17,8 @@
 import java.util.List;
 import java.util.logging.Logger;
 
-import javax.json.JsonObject;
-import javax.mail.internet.InternetAddress;
+import jakarta.json.JsonObject;
+import jakarta.mail.internet.InternetAddress;
 
 public class FeedbackUtil {
 
diff --git a/src/main/java/edu/harvard/iq/dataverse/flyway/StartupFlywayMigrator.java b/src/main/java/edu/harvard/iq/dataverse/flyway/StartupFlywayMigrator.java
index 71b53bd43f2..39bc46216ca 100644
--- a/src/main/java/edu/harvard/iq/dataverse/flyway/StartupFlywayMigrator.java
+++ b/src/main/java/edu/harvard/iq/dataverse/flyway/StartupFlywayMigrator.java
@@ -2,12 +2,12 @@
 
 import org.flywaydb.core.Flyway;
 
-import javax.annotation.PostConstruct;
-import javax.annotation.Resource;
-import javax.ejb.Singleton;
-import javax.ejb.Startup;
-import javax.ejb.TransactionManagement;
-import javax.ejb.TransactionManagementType;
+import jakarta.annotation.PostConstruct;
+import jakarta.annotation.Resource;
+import jakarta.ejb.Singleton;
+import jakarta.ejb.Startup;
+import jakarta.ejb.TransactionManagement;
+import jakarta.ejb.TransactionManagementType;
 import javax.sql.DataSource;
 
 @Startup
diff --git a/src/main/java/edu/harvard/iq/dataverse/globus/GlobusServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/globus/GlobusServiceBean.java
index 9d80c5cc280..d2613422be9 100644
--- a/src/main/java/edu/harvard/iq/dataverse/globus/GlobusServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/globus/GlobusServiceBean.java
@@ -4,19 +4,19 @@
 import com.google.gson.GsonBuilder;
 import edu.harvard.iq.dataverse.*;
 
-import javax.ejb.Asynchronous;
-import javax.ejb.EJB;
-import javax.ejb.Stateless;
-import javax.ejb.TransactionAttribute;
-import javax.ejb.TransactionAttributeType;
-import javax.inject.Inject;
-import javax.inject.Named;
-import javax.json.Json;
-import javax.json.JsonArray;
-import javax.json.JsonArrayBuilder;
-import javax.json.JsonObject;
-import javax.json.JsonPatch;
-import javax.servlet.http.HttpServletRequest;
+import jakarta.ejb.Asynchronous;
+import jakarta.ejb.EJB;
+import jakarta.ejb.Stateless;
+import jakarta.ejb.TransactionAttribute;
+import jakarta.ejb.TransactionAttributeType;
+import jakarta.inject.Inject;
+import jakarta.inject.Named;
+import jakarta.json.Json;
+import jakarta.json.JsonArray;
+import jakarta.json.JsonArrayBuilder;
+import jakarta.json.JsonObject;
+import jakarta.json.JsonPatch;
+import jakarta.servlet.http.HttpServletRequest;
 
 import static edu.harvard.iq.dataverse.util.json.JsonPrinter.json;
 import static edu.harvard.iq.dataverse.util.json.JsonPrinter.toJsonArray;
diff --git a/src/main/java/edu/harvard/iq/dataverse/harvest/client/ClientHarvestRun.java b/src/main/java/edu/harvard/iq/dataverse/harvest/client/ClientHarvestRun.java
index 50d06807a13..ba6f5c3dec2 100644
--- a/src/main/java/edu/harvard/iq/dataverse/harvest/client/ClientHarvestRun.java
+++ b/src/main/java/edu/harvard/iq/dataverse/harvest/client/ClientHarvestRun.java
@@ -7,14 +7,14 @@
 
 import java.io.Serializable;
 import java.util.Date;
-import javax.persistence.Entity;
-import javax.persistence.GeneratedValue;
-import javax.persistence.GenerationType;
-import javax.persistence.Id;
-import javax.persistence.JoinColumn;
-import javax.persistence.ManyToOne;
-import javax.persistence.Temporal;
-import javax.persistence.TemporalType;
+import jakarta.persistence.Entity;
+import jakarta.persistence.GeneratedValue;
+import jakarta.persistence.GenerationType;
+import jakarta.persistence.Id;
+import jakarta.persistence.JoinColumn;
+import jakarta.persistence.ManyToOne;
+import jakarta.persistence.Temporal;
+import jakarta.persistence.TemporalType;
 
 /**
  *
diff --git a/src/main/java/edu/harvard/iq/dataverse/harvest/client/HarvesterServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/harvest/client/HarvesterServiceBean.java
index 40bd45ecb30..20884e3360c 100644
--- a/src/main/java/edu/harvard/iq/dataverse/harvest/client/HarvesterServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/harvest/client/HarvesterServiceBean.java
@@ -21,13 +21,13 @@
 import java.util.logging.FileHandler;
 import java.util.logging.Level;
 import java.util.logging.Logger;
-import javax.annotation.Resource;
-import javax.ejb.Asynchronous;
-import javax.ejb.EJB;
-import javax.ejb.EJBException;
-import javax.ejb.Stateless;
-import javax.ejb.Timer;
-import javax.inject.Named;
+import jakarta.annotation.Resource;
+import jakarta.ejb.Asynchronous;
+import jakarta.ejb.EJB;
+import jakarta.ejb.EJBException;
+import jakarta.ejb.Stateless;
+import jakarta.ejb.Timer;
+import jakarta.inject.Named;
 import javax.xml.parsers.ParserConfigurationException;
 import javax.xml.transform.TransformerException;
 import org.apache.commons.lang3.mutable.MutableBoolean;
@@ -51,8 +51,8 @@
 import java.nio.file.Files;
 import java.nio.file.Paths;
 import java.nio.file.Path;
-import javax.persistence.EntityManager;
-import javax.persistence.PersistenceContext;
+import jakarta.persistence.EntityManager;
+import jakarta.persistence.PersistenceContext;
 
 /**
  *
@@ -69,7 +69,7 @@ public class HarvesterServiceBean {
     @EJB
     DatasetServiceBean datasetService;
     @Resource
-    javax.ejb.TimerService timerService;
+    jakarta.ejb.TimerService timerService;
     @EJB
     DataverseTimerServiceBean dataverseTimerService;
     @EJB
@@ -148,12 +148,12 @@ public void doHarvest(DataverseRequest dataverseRequest, Long harvestingClientId
                 
         String logTimestamp = logFormatter.format(new Date());
         Logger hdLogger = Logger.getLogger("edu.harvard.iq.dataverse.harvest.client.HarvesterServiceBean." + harvestingClientConfig.getName() + logTimestamp);
-        String logFileName = "../logs" + File.separator + "harvest_" + harvestingClientConfig.getName() + "_" + logTimestamp + ".log";
+        String logFileName = System.getProperty("com.sun.aas.instanceRoot") + File.separator + "logs" + File.separator + "harvest_" + harvestingClientConfig.getName() + "_" + logTimestamp + ".log";
         FileHandler fileHandler = new FileHandler(logFileName);
         hdLogger.setUseParentHandlers(false);
         hdLogger.addHandler(fileHandler);
         
-        PrintWriter importCleanupLog = new PrintWriter(new FileWriter( "../logs/harvest_cleanup_" + harvestingClientConfig.getName() + "_" + logTimestamp+".txt"));
+        PrintWriter importCleanupLog = new PrintWriter(new FileWriter(System.getProperty("com.sun.aas.instanceRoot") + File.separator + "logs/harvest_cleanup_" + harvestingClientConfig.getName() + "_" + logTimestamp + ".txt"));
         
         
         List<Long> harvestedDatasetIds = new ArrayList<>();
diff --git a/src/main/java/edu/harvard/iq/dataverse/harvest/client/HarvestingClient.java b/src/main/java/edu/harvard/iq/dataverse/harvest/client/HarvestingClient.java
index d27ddc41b7f..40db55f2a0c 100644
--- a/src/main/java/edu/harvard/iq/dataverse/harvest/client/HarvestingClient.java
+++ b/src/main/java/edu/harvard/iq/dataverse/harvest/client/HarvestingClient.java
@@ -17,25 +17,25 @@
 import java.util.LinkedHashMap;
 import java.util.List;
 import java.util.Map;
-import javax.persistence.CascadeType;
-import javax.persistence.Column;
-import javax.persistence.Entity;
-import javax.persistence.GeneratedValue;
-import javax.persistence.GenerationType;
-import javax.persistence.Id;
-import javax.persistence.Index;
-import javax.persistence.JoinColumn;
-import javax.persistence.ManyToOne;
-import javax.persistence.NamedQueries;
-import javax.persistence.NamedQuery;
-import javax.persistence.OneToMany;
-import javax.persistence.OneToOne;
-import javax.persistence.OrderBy;
-import javax.persistence.Table;
-import javax.persistence.Temporal;
-import javax.persistence.TemporalType;
-import javax.validation.constraints.Pattern;
-import javax.validation.constraints.Size;
+import jakarta.persistence.CascadeType;
+import jakarta.persistence.Column;
+import jakarta.persistence.Entity;
+import jakarta.persistence.GeneratedValue;
+import jakarta.persistence.GenerationType;
+import jakarta.persistence.Id;
+import jakarta.persistence.Index;
+import jakarta.persistence.JoinColumn;
+import jakarta.persistence.ManyToOne;
+import jakarta.persistence.NamedQueries;
+import jakarta.persistence.NamedQuery;
+import jakarta.persistence.OneToMany;
+import jakarta.persistence.OneToOne;
+import jakarta.persistence.OrderBy;
+import jakarta.persistence.Table;
+import jakarta.persistence.Temporal;
+import jakarta.persistence.TemporalType;
+import jakarta.validation.constraints.Pattern;
+import jakarta.validation.constraints.Size;
 import org.hibernate.validator.constraints.NotBlank;
 
 /**
diff --git a/src/main/java/edu/harvard/iq/dataverse/harvest/client/HarvestingClientServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/harvest/client/HarvestingClientServiceBean.java
index 13cc44ce919..7ec6d75a41c 100644
--- a/src/main/java/edu/harvard/iq/dataverse/harvest/client/HarvestingClientServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/harvest/client/HarvestingClientServiceBean.java
@@ -5,25 +5,23 @@
 import edu.harvard.iq.dataverse.DataverseRequestServiceBean;
 import edu.harvard.iq.dataverse.DataverseServiceBean;
 import edu.harvard.iq.dataverse.EjbDataverseEngine;
-import edu.harvard.iq.dataverse.engine.command.exception.CommandException;
-import edu.harvard.iq.dataverse.engine.command.impl.DeleteHarvestingClientCommand;
 import edu.harvard.iq.dataverse.search.IndexServiceBean;
 import edu.harvard.iq.dataverse.timer.DataverseTimerServiceBean;
 import java.util.ArrayList;
 import java.util.Date;
 import java.util.List;
 import java.util.logging.Logger;
-import javax.ejb.Asynchronous;
-import javax.ejb.EJB;
-import javax.ejb.Stateless;
-import javax.ejb.TransactionAttribute;
-import javax.ejb.TransactionAttributeType;
-import javax.inject.Inject;
-import javax.inject.Named;
-import javax.persistence.EntityManager;
-import javax.persistence.NoResultException;
-import javax.persistence.NonUniqueResultException;
-import javax.persistence.PersistenceContext;
+import jakarta.ejb.Asynchronous;
+import jakarta.ejb.EJB;
+import jakarta.ejb.Stateless;
+import jakarta.ejb.TransactionAttribute;
+import jakarta.ejb.TransactionAttributeType;
+import jakarta.inject.Inject;
+import jakarta.inject.Named;
+import jakarta.persistence.EntityManager;
+import jakarta.persistence.NoResultException;
+import jakarta.persistence.NonUniqueResultException;
+import jakarta.persistence.PersistenceContext;
 
 /**
  *
diff --git a/src/main/java/edu/harvard/iq/dataverse/harvest/server/OAIRecord.java b/src/main/java/edu/harvard/iq/dataverse/harvest/server/OAIRecord.java
index 49e40e786ea..94753d8594d 100644
--- a/src/main/java/edu/harvard/iq/dataverse/harvest/server/OAIRecord.java
+++ b/src/main/java/edu/harvard/iq/dataverse/harvest/server/OAIRecord.java
@@ -21,12 +21,12 @@
 
 import java.io.Serializable;
 import java.util.Date;
-import javax.persistence.Entity;
-import javax.persistence.GeneratedValue;
-import javax.persistence.GenerationType;
-import javax.persistence.Id;
-import javax.persistence.Temporal;
-import javax.persistence.TemporalType;
+import jakarta.persistence.Entity;
+import jakarta.persistence.GeneratedValue;
+import jakarta.persistence.GenerationType;
+import jakarta.persistence.Id;
+import jakarta.persistence.Temporal;
+import jakarta.persistence.TemporalType;
 
 /**
  *
diff --git a/src/main/java/edu/harvard/iq/dataverse/harvest/server/OAIRecordServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/harvest/server/OAIRecordServiceBean.java
index e73dbb9fc07..1b4a7bc7db0 100644
--- a/src/main/java/edu/harvard/iq/dataverse/harvest/server/OAIRecordServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/harvest/server/OAIRecordServiceBean.java
@@ -13,27 +13,22 @@
 import edu.harvard.iq.dataverse.search.IndexServiceBean;
 import edu.harvard.iq.dataverse.settings.SettingsServiceBean;
 import java.time.Instant;
-import java.io.File;
-import java.io.IOException;
-import java.sql.Timestamp;
-import java.text.SimpleDateFormat;
 import java.util.Collection;
 import java.util.Date;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
-import java.util.logging.FileHandler;
 import java.util.logging.Level;
 import java.util.logging.Logger;
-import javax.ejb.EJB;
-import javax.ejb.Stateless;
-import javax.ejb.TransactionAttribute;
-import static javax.ejb.TransactionAttributeType.REQUIRES_NEW;
-import javax.inject.Named;
-import javax.persistence.EntityManager;
-import javax.persistence.PersistenceContext;
-import javax.persistence.TypedQuery;
-import javax.persistence.TemporalType;
+import jakarta.ejb.EJB;
+import jakarta.ejb.Stateless;
+import jakarta.ejb.TransactionAttribute;
+import static jakarta.ejb.TransactionAttributeType.REQUIRES_NEW;
+import jakarta.inject.Named;
+import jakarta.persistence.EntityManager;
+import jakarta.persistence.PersistenceContext;
+import jakarta.persistence.TypedQuery;
+import jakarta.persistence.TemporalType;
 
 /**
  *
@@ -265,7 +260,7 @@ public OAIRecord findOAIRecordBySetNameandGlobalId(String setName, String global
         
         try {
            oaiRecord = (OAIRecord) query.setMaxResults(1).getSingleResult();
-        } catch (javax.persistence.NoResultException e) {
+        } catch (jakarta.persistence.NoResultException e) {
            // Do nothing, just return null. 
         }
         logger.fine("returning oai record.");
diff --git a/src/main/java/edu/harvard/iq/dataverse/harvest/server/OAISet.java b/src/main/java/edu/harvard/iq/dataverse/harvest/server/OAISet.java
index 038bb66de32..8d6b04effef 100644
--- a/src/main/java/edu/harvard/iq/dataverse/harvest/server/OAISet.java
+++ b/src/main/java/edu/harvard/iq/dataverse/harvest/server/OAISet.java
@@ -20,16 +20,16 @@
 package edu.harvard.iq.dataverse.harvest.server;
 
 import java.io.Serializable;
-import javax.persistence.Column;
-import javax.persistence.Entity;
-import javax.persistence.GeneratedValue;
-import javax.persistence.GenerationType;
-import javax.persistence.Id;
-import javax.persistence.JoinColumn;
-import javax.persistence.OneToOne;
-import javax.persistence.Version;
-import javax.validation.constraints.Pattern;
-import javax.validation.constraints.Size;
+import jakarta.persistence.Column;
+import jakarta.persistence.Entity;
+import jakarta.persistence.GeneratedValue;
+import jakarta.persistence.GenerationType;
+import jakarta.persistence.Id;
+import jakarta.persistence.JoinColumn;
+import jakarta.persistence.OneToOne;
+import jakarta.persistence.Version;
+import jakarta.validation.constraints.Pattern;
+import jakarta.validation.constraints.Size;
 
 /**
  *
diff --git a/src/main/java/edu/harvard/iq/dataverse/harvest/server/OAISetServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/harvest/server/OAISetServiceBean.java
index 6b28c8808a0..2bd666401c7 100644
--- a/src/main/java/edu/harvard/iq/dataverse/harvest/server/OAISetServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/harvest/server/OAISetServiceBean.java
@@ -17,19 +17,17 @@
 import java.util.logging.FileHandler;
 import java.util.logging.Level;
 import java.util.logging.Logger;
-import javax.ejb.Asynchronous;
-import javax.ejb.EJB;
-import javax.ejb.Stateless;
-import javax.ejb.TransactionAttribute;
-import javax.ejb.TransactionAttributeType;
-import javax.inject.Named;
-import javax.persistence.EntityManager;
-import javax.persistence.PersistenceContext;
-import org.apache.solr.client.solrj.SolrClient;
+import jakarta.ejb.Asynchronous;
+import jakarta.ejb.EJB;
+import jakarta.ejb.Stateless;
+import jakarta.ejb.TransactionAttribute;
+import jakarta.ejb.TransactionAttributeType;
+import jakarta.inject.Named;
+import jakarta.persistence.EntityManager;
+import jakarta.persistence.PersistenceContext;
 import org.apache.solr.client.solrj.SolrQuery;
 import org.apache.solr.client.solrj.SolrServerException;
-import org.apache.solr.client.solrj.impl.HttpSolrClient;
-import org.apache.solr.client.solrj.impl.HttpSolrClient.RemoteSolrException;
+import org.apache.solr.client.solrj.impl.BaseHttpSolrClient.RemoteSolrException;
 import org.apache.solr.client.solrj.response.QueryResponse;
 import org.apache.solr.common.SolrDocument;
 import org.apache.solr.common.SolrDocumentList;
diff --git a/src/main/java/edu/harvard/iq/dataverse/harvest/server/web/servlet/OAIServlet.java b/src/main/java/edu/harvard/iq/dataverse/harvest/server/web/servlet/OAIServlet.java
index 9cf1629abfc..96a19acc0e8 100644
--- a/src/main/java/edu/harvard/iq/dataverse/harvest/server/web/servlet/OAIServlet.java
+++ b/src/main/java/edu/harvard/iq/dataverse/harvest/server/web/servlet/OAIServlet.java
@@ -38,17 +38,16 @@
 
 
 import java.io.IOException;
-import java.time.Instant;
 import java.util.logging.Logger;
-import javax.ejb.EJB;
-import javax.inject.Inject;
+import jakarta.ejb.EJB;
+import jakarta.inject.Inject;
 import org.eclipse.microprofile.config.inject.ConfigProperty;
-import javax.mail.internet.InternetAddress;
-import javax.servlet.ServletConfig;
-import javax.servlet.ServletException;
-import javax.servlet.http.HttpServlet;
-import javax.servlet.http.HttpServletRequest;
-import javax.servlet.http.HttpServletResponse;
+import jakarta.mail.internet.InternetAddress;
+import jakarta.servlet.ServletConfig;
+import jakarta.servlet.ServletException;
+import jakarta.servlet.http.HttpServlet;
+import jakarta.servlet.http.HttpServletRequest;
+import jakarta.servlet.http.HttpServletResponse;
 import javax.xml.stream.XMLStreamException;
 import org.eclipse.microprofile.config.Config;
 import org.eclipse.microprofile.config.ConfigProvider;
diff --git a/src/main/java/edu/harvard/iq/dataverse/ingest/IngestMessageBean.java b/src/main/java/edu/harvard/iq/dataverse/ingest/IngestMessageBean.java
index 77ec6701bc6..f56fe608a52 100644
--- a/src/main/java/edu/harvard/iq/dataverse/ingest/IngestMessageBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/ingest/IngestMessageBean.java
@@ -29,15 +29,15 @@
 import java.time.Instant;
 import java.util.Iterator;
 import java.util.logging.Logger;
-import javax.ejb.ActivationConfigProperty;
-import javax.ejb.EJB;
-import javax.ejb.MessageDriven;
-import javax.ejb.TransactionAttribute;
-import javax.ejb.TransactionAttributeType;
-import javax.jms.JMSException;
-import javax.jms.Message;
-import javax.jms.MessageListener;
-import javax.jms.ObjectMessage;
+import jakarta.ejb.ActivationConfigProperty;
+import jakarta.ejb.EJB;
+import jakarta.ejb.MessageDriven;
+import jakarta.ejb.TransactionAttribute;
+import jakarta.ejb.TransactionAttributeType;
+import jakarta.jms.JMSException;
+import jakarta.jms.Message;
+import jakarta.jms.MessageListener;
+import jakarta.jms.ObjectMessage;
 /**
  *
  * This is an experimental, JMS-based implementation of asynchronous 
@@ -49,7 +49,7 @@
     mappedName = "java:app/jms/queue/ingest",
     activationConfig =  {
         @ActivationConfigProperty(propertyName = "acknowledgeMode", propertyValue = "Auto-acknowledge"),
-        @ActivationConfigProperty(propertyName = "destinationType", propertyValue = "javax.jms.Queue")
+        @ActivationConfigProperty(propertyName = "destinationType", propertyValue = "jakarta.jms.Queue")
     }
 )
 
diff --git a/src/main/java/edu/harvard/iq/dataverse/ingest/IngestQueueProducer.java b/src/main/java/edu/harvard/iq/dataverse/ingest/IngestQueueProducer.java
index 1ba63207208..0fed25e5c88 100644
--- a/src/main/java/edu/harvard/iq/dataverse/ingest/IngestQueueProducer.java
+++ b/src/main/java/edu/harvard/iq/dataverse/ingest/IngestQueueProducer.java
@@ -1,19 +1,19 @@
 package edu.harvard.iq.dataverse.ingest;
 
-import javax.annotation.Resource;
+import jakarta.annotation.Resource;
 // https://www.baeldung.com/jee-cdi-vs-ejb-singleton
-import javax.inject.Singleton;
-import javax.enterprise.inject.Produces;
-import javax.jms.JMSConnectionFactoryDefinition;
-import javax.jms.JMSDestinationDefinition;
-import javax.jms.Queue;
-import javax.jms.QueueConnectionFactory;
+import jakarta.inject.Singleton;
+import jakarta.enterprise.inject.Produces;
+import jakarta.jms.JMSConnectionFactoryDefinition;
+import jakarta.jms.JMSDestinationDefinition;
+import jakarta.jms.Queue;
+import jakarta.jms.QueueConnectionFactory;
 
 @JMSConnectionFactoryDefinition(
     description = "Dataverse Ingest Queue Factory",
     name = "java:app/jms/factory/ingest",
     resourceAdapter = "jmsra",
-    interfaceName = "javax.jms.QueueConnectionFactory",
+    interfaceName = "jakarta.jms.QueueConnectionFactory",
     maxPoolSize = 250,
     minPoolSize = 1,
     properties = {
@@ -25,7 +25,7 @@
     description = "Dataverse Ingest Queue",
     name = "java:app/jms/queue/ingest",
     resourceAdapter = "jmsra",
-    interfaceName="javax.jms.Queue",
+    interfaceName="jakarta.jms.Queue",
     destinationName = "DataverseIngest"
 )
 @Singleton
diff --git a/src/main/java/edu/harvard/iq/dataverse/ingest/IngestReport.java b/src/main/java/edu/harvard/iq/dataverse/ingest/IngestReport.java
index 31208abf839..a1a8bde77f4 100644
--- a/src/main/java/edu/harvard/iq/dataverse/ingest/IngestReport.java
+++ b/src/main/java/edu/harvard/iq/dataverse/ingest/IngestReport.java
@@ -9,17 +9,17 @@
 import edu.harvard.iq.dataverse.DataFile;
 import java.io.Serializable;
 import java.util.Date;
-import javax.persistence.Entity;
-import javax.persistence.GeneratedValue;
-import javax.persistence.GenerationType;
-import javax.persistence.Id;
-import javax.persistence.Index;
-import javax.persistence.JoinColumn;
-import javax.persistence.Lob;
-import javax.persistence.ManyToOne;
-import javax.persistence.Table;
-import javax.persistence.Temporal;
-import javax.persistence.TemporalType;
+import jakarta.persistence.Entity;
+import jakarta.persistence.GeneratedValue;
+import jakarta.persistence.GenerationType;
+import jakarta.persistence.Id;
+import jakarta.persistence.Index;
+import jakarta.persistence.JoinColumn;
+import jakarta.persistence.Lob;
+import jakarta.persistence.ManyToOne;
+import jakarta.persistence.Table;
+import jakarta.persistence.Temporal;
+import jakarta.persistence.TemporalType;
 
 /**
  *
diff --git a/src/main/java/edu/harvard/iq/dataverse/ingest/IngestRequest.java b/src/main/java/edu/harvard/iq/dataverse/ingest/IngestRequest.java
index 024e90325c3..a5d6a1af75c 100644
--- a/src/main/java/edu/harvard/iq/dataverse/ingest/IngestRequest.java
+++ b/src/main/java/edu/harvard/iq/dataverse/ingest/IngestRequest.java
@@ -7,16 +7,16 @@
 
 import edu.harvard.iq.dataverse.DataFile;
 import java.io.Serializable;
-import javax.persistence.CascadeType;
-import javax.persistence.Entity;
-import javax.persistence.GeneratedValue;
-import javax.persistence.GenerationType;
-import javax.persistence.Id;
-import javax.persistence.Index;
-import javax.persistence.JoinColumn;
-import javax.persistence.ManyToOne;
-import javax.persistence.OneToOne;
-import javax.persistence.Table;
+import jakarta.persistence.CascadeType;
+import jakarta.persistence.Entity;
+import jakarta.persistence.GeneratedValue;
+import jakarta.persistence.GenerationType;
+import jakarta.persistence.Id;
+import jakarta.persistence.Index;
+import jakarta.persistence.JoinColumn;
+import jakarta.persistence.ManyToOne;
+import jakarta.persistence.OneToOne;
+import jakarta.persistence.Table;
 
 /**
  *
diff --git a/src/main/java/edu/harvard/iq/dataverse/ingest/IngestServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/ingest/IngestServiceBean.java
index c1e82042898..40dc3d6fdd6 100644
--- a/src/main/java/edu/harvard/iq/dataverse/ingest/IngestServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/ingest/IngestServiceBean.java
@@ -107,20 +107,20 @@
 import java.util.logging.Logger;
 import java.util.Hashtable;
 import java.util.Optional;
-import javax.ejb.EJB;
-import javax.ejb.Stateless;
-import javax.inject.Named;
-import javax.jms.Queue;
-import javax.jms.QueueConnectionFactory;
-import javax.annotation.Resource;
-import javax.ejb.Asynchronous;
-import javax.jms.JMSException;
-import javax.jms.QueueConnection;
-import javax.jms.QueueSender;
-import javax.jms.QueueSession;
-import javax.jms.Message;
-import javax.faces.application.FacesMessage;
-import javax.ws.rs.core.MediaType;
+import jakarta.ejb.EJB;
+import jakarta.ejb.Stateless;
+import jakarta.inject.Named;
+import jakarta.jms.Queue;
+import jakarta.jms.QueueConnectionFactory;
+import jakarta.annotation.Resource;
+import jakarta.ejb.Asynchronous;
+import jakarta.jms.JMSException;
+import jakarta.jms.QueueConnection;
+import jakarta.jms.QueueSender;
+import jakarta.jms.QueueSession;
+import jakarta.jms.Message;
+import jakarta.faces.application.FacesMessage;
+import jakarta.ws.rs.core.MediaType;
 import ucar.nc2.NetcdfFile;
 import ucar.nc2.NetcdfFiles;
 
diff --git a/src/main/java/edu/harvard/iq/dataverse/ingest/IngestServiceShapefileHelper.java b/src/main/java/edu/harvard/iq/dataverse/ingest/IngestServiceShapefileHelper.java
index 9ea2cd0343f..8c5dad237b1 100644
--- a/src/main/java/edu/harvard/iq/dataverse/ingest/IngestServiceShapefileHelper.java
+++ b/src/main/java/edu/harvard/iq/dataverse/ingest/IngestServiceShapefileHelper.java
@@ -16,7 +16,7 @@
 import java.io.IOException;
 import java.util.List;
 import java.util.logging.Logger;
-//import javax.ejb.EJB;
+//import jakarta.ejb.EJB;
 
 /**
  *  Used by the IngestServiceBean to redistribute a zipped Shapefile*
diff --git a/src/main/java/edu/harvard/iq/dataverse/ingest/IngestUtil.java b/src/main/java/edu/harvard/iq/dataverse/ingest/IngestUtil.java
index 368824680c0..3d30f7e6ec3 100644
--- a/src/main/java/edu/harvard/iq/dataverse/ingest/IngestUtil.java
+++ b/src/main/java/edu/harvard/iq/dataverse/ingest/IngestUtil.java
@@ -20,15 +20,12 @@
 package edu.harvard.iq.dataverse.ingest;
 
 import edu.harvard.iq.dataverse.DataFile;
-import edu.harvard.iq.dataverse.Dataset;
 import edu.harvard.iq.dataverse.DatasetVersion;
 import edu.harvard.iq.dataverse.FileMetadata;
 import edu.harvard.iq.dataverse.util.FileUtil;
 
 import java.io.File;
-import java.io.FileWriter;
 import java.io.IOException;
-import java.io.PrintWriter;
 import java.util.ArrayList;
 import java.util.Collection;
 import java.util.HashSet;
@@ -36,9 +33,9 @@
 import java.util.List;
 import java.util.Set;
 import java.util.logging.Logger;
-import javax.json.Json;
-import javax.json.JsonArrayBuilder;
-import javax.json.JsonObjectBuilder;
+import jakarta.json.Json;
+import jakarta.json.JsonArrayBuilder;
+import jakarta.json.JsonObjectBuilder;
 import org.dataverse.unf.UNFUtil;
 import org.dataverse.unf.UnfException;
 
diff --git a/src/main/java/edu/harvard/iq/dataverse/ingest/tabulardata/impl/plugins/rdata/RDATAFileReader.java b/src/main/java/edu/harvard/iq/dataverse/ingest/tabulardata/impl/plugins/rdata/RDATAFileReader.java
index 6d17a5bd553..eb1353fd792 100644
--- a/src/main/java/edu/harvard/iq/dataverse/ingest/tabulardata/impl/plugins/rdata/RDATAFileReader.java
+++ b/src/main/java/edu/harvard/iq/dataverse/ingest/tabulardata/impl/plugins/rdata/RDATAFileReader.java
@@ -21,14 +21,12 @@
 
 
 import java.io.*;
-import java.io.FileReader;
 import java.io.InputStreamReader;
 import java.text.*;
 import java.util.logging.*;
 import java.util.*;
-import java.security.NoSuchAlgorithmException;
 
-import javax.inject.Inject;
+import jakarta.inject.Inject;
 
 // Rosuda Wrappers and Methods for R-calls to Rserve
 import edu.harvard.iq.dataverse.settings.JvmSettings;
@@ -43,18 +41,14 @@
 import edu.harvard.iq.dataverse.datavariable.DataVariable;
 import edu.harvard.iq.dataverse.datavariable.VariableCategory;
 
-import edu.harvard.iq.dataverse.ingest.plugin.spi.*;
 import edu.harvard.iq.dataverse.ingest.tabulardata.TabularDataFileReader;
 import edu.harvard.iq.dataverse.ingest.tabulardata.spi.TabularDataFileReaderSpi;
 import edu.harvard.iq.dataverse.ingest.tabulardata.TabularDataIngest;
 import edu.harvard.iq.dataverse.rserve.*;
-import javax.naming.Context;
-import javax.naming.InitialContext;
-import javax.naming.NamingException;
 
 
 import org.apache.commons.lang3.RandomStringUtils;
-import org.apache.commons.lang3.ArrayUtils;
+
 /**
  * Dataverse 4.0 implementation of <code>TabularDataFileReader</code> for the 
  * RData Binary Format.
diff --git a/src/main/java/edu/harvard/iq/dataverse/license/License.java b/src/main/java/edu/harvard/iq/dataverse/license/License.java
index c6e2cdbc2e5..fe19073ab8d 100644
--- a/src/main/java/edu/harvard/iq/dataverse/license/License.java
+++ b/src/main/java/edu/harvard/iq/dataverse/license/License.java
@@ -1,15 +1,15 @@
 package edu.harvard.iq.dataverse.license;
 
-import javax.persistence.Column;
-import javax.persistence.Entity;
-import javax.persistence.GeneratedValue;
-import javax.persistence.GenerationType;
-import javax.persistence.Id;
-import javax.persistence.NamedQueries;
-import javax.persistence.NamedQuery;
-import javax.persistence.OneToMany;
-import javax.persistence.Table;
-import javax.persistence.UniqueConstraint;
+import jakarta.persistence.Column;
+import jakarta.persistence.Entity;
+import jakarta.persistence.GeneratedValue;
+import jakarta.persistence.GenerationType;
+import jakarta.persistence.Id;
+import jakarta.persistence.NamedQueries;
+import jakarta.persistence.NamedQuery;
+import jakarta.persistence.OneToMany;
+import jakarta.persistence.Table;
+import jakarta.persistence.UniqueConstraint;
 
 import edu.harvard.iq.dataverse.TermsOfUseAndAccess;
 
diff --git a/src/main/java/edu/harvard/iq/dataverse/license/LicenseConverter.java b/src/main/java/edu/harvard/iq/dataverse/license/LicenseConverter.java
index 4f874132128..26b7ca63a29 100644
--- a/src/main/java/edu/harvard/iq/dataverse/license/LicenseConverter.java
+++ b/src/main/java/edu/harvard/iq/dataverse/license/LicenseConverter.java
@@ -6,11 +6,11 @@
 
 package edu.harvard.iq.dataverse.license;
 
-import javax.enterprise.inject.spi.CDI;
-import javax.faces.component.UIComponent;
-import javax.faces.context.FacesContext;
-import javax.faces.convert.Converter;
-import javax.faces.convert.FacesConverter;
+import jakarta.enterprise.inject.spi.CDI;
+import jakarta.faces.component.UIComponent;
+import jakarta.faces.context.FacesContext;
+import jakarta.faces.convert.Converter;
+import jakarta.faces.convert.FacesConverter;
 
 @FacesConverter("licenseConverter")
 public class LicenseConverter implements Converter {
diff --git a/src/main/java/edu/harvard/iq/dataverse/license/LicenseServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/license/LicenseServiceBean.java
index 9704e4b92dc..93f4958038c 100644
--- a/src/main/java/edu/harvard/iq/dataverse/license/LicenseServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/license/LicenseServiceBean.java
@@ -5,13 +5,13 @@
 import edu.harvard.iq.dataverse.api.AbstractApiBean.WrappedResponse;
 import static edu.harvard.iq.dataverse.dataset.DatasetUtil.getLocalizedLicenseName;
 
-import javax.ejb.EJB;
-import javax.ejb.Stateless;
-import javax.inject.Named;
-import javax.persistence.EntityManager;
-import javax.persistence.NoResultException;
-import javax.persistence.PersistenceContext;
-import javax.persistence.PersistenceException;
+import jakarta.ejb.EJB;
+import jakarta.ejb.Stateless;
+import jakarta.inject.Named;
+import jakarta.persistence.EntityManager;
+import jakarta.persistence.NoResultException;
+import jakarta.persistence.PersistenceContext;
+import jakarta.persistence.PersistenceException;
 import java.util.List;
 import java.util.logging.Level;
 import java.util.logging.Logger;
diff --git a/src/main/java/edu/harvard/iq/dataverse/locality/DvObjectStorageLocation.java b/src/main/java/edu/harvard/iq/dataverse/locality/DvObjectStorageLocation.java
index 33486128e7b..5844c8b6ab9 100644
--- a/src/main/java/edu/harvard/iq/dataverse/locality/DvObjectStorageLocation.java
+++ b/src/main/java/edu/harvard/iq/dataverse/locality/DvObjectStorageLocation.java
@@ -2,11 +2,11 @@
 
 import edu.harvard.iq.dataverse.DvObject;
 import java.io.Serializable;
-import javax.persistence.GeneratedValue;
-import javax.persistence.GenerationType;
-import javax.persistence.Id;
-import javax.persistence.JoinColumn;
-import javax.persistence.OneToOne;
+import jakarta.persistence.GeneratedValue;
+import jakarta.persistence.GenerationType;
+import jakarta.persistence.Id;
+import jakarta.persistence.JoinColumn;
+import jakarta.persistence.OneToOne;
 
 /**
  * Future use, maybe. Once we're happy with the design we'll enable it as an
diff --git a/src/main/java/edu/harvard/iq/dataverse/locality/StorageSite.java b/src/main/java/edu/harvard/iq/dataverse/locality/StorageSite.java
index d873b9f8989..c074cb5918f 100644
--- a/src/main/java/edu/harvard/iq/dataverse/locality/StorageSite.java
+++ b/src/main/java/edu/harvard/iq/dataverse/locality/StorageSite.java
@@ -2,13 +2,13 @@
 
 import java.io.Serializable;
 import java.util.Objects;
-import javax.json.Json;
-import javax.json.JsonObjectBuilder;
-import javax.persistence.Column;
-import javax.persistence.Entity;
-import javax.persistence.GeneratedValue;
-import javax.persistence.GenerationType;
-import javax.persistence.Id;
+import jakarta.json.Json;
+import jakarta.json.JsonObjectBuilder;
+import jakarta.persistence.Column;
+import jakarta.persistence.Entity;
+import jakarta.persistence.GeneratedValue;
+import jakarta.persistence.GenerationType;
+import jakarta.persistence.Id;
 
 @Entity
 public class StorageSite implements Serializable {
diff --git a/src/main/java/edu/harvard/iq/dataverse/locality/StorageSiteServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/locality/StorageSiteServiceBean.java
index c7057ab9318..781e896e9a7 100644
--- a/src/main/java/edu/harvard/iq/dataverse/locality/StorageSiteServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/locality/StorageSiteServiceBean.java
@@ -2,12 +2,12 @@
 
 import java.util.List;
 import java.util.logging.Logger;
-import javax.ejb.Stateless;
-import javax.persistence.EntityManager;
-import javax.persistence.NoResultException;
-import javax.persistence.NonUniqueResultException;
-import javax.persistence.PersistenceContext;
-import javax.persistence.TypedQuery;
+import jakarta.ejb.Stateless;
+import jakarta.persistence.EntityManager;
+import jakarta.persistence.NoResultException;
+import jakarta.persistence.NonUniqueResultException;
+import jakarta.persistence.PersistenceContext;
+import jakarta.persistence.TypedQuery;
 
 @Stateless
 public class StorageSiteServiceBean {
diff --git a/src/main/java/edu/harvard/iq/dataverse/locality/StorageSiteUtil.java b/src/main/java/edu/harvard/iq/dataverse/locality/StorageSiteUtil.java
index ebc2bb0f19f..6ff0f7ca379 100644
--- a/src/main/java/edu/harvard/iq/dataverse/locality/StorageSiteUtil.java
+++ b/src/main/java/edu/harvard/iq/dataverse/locality/StorageSiteUtil.java
@@ -2,7 +2,7 @@
 
 import edu.harvard.iq.dataverse.util.SystemConfig;
 import java.util.List;
-import javax.json.JsonObject;
+import jakarta.json.JsonObject;
 
 public class StorageSiteUtil {
 
diff --git a/src/main/java/edu/harvard/iq/dataverse/makedatacount/DatasetExternalCitations.java b/src/main/java/edu/harvard/iq/dataverse/makedatacount/DatasetExternalCitations.java
index 3c1c0bc0c68..469f3abe9da 100644
--- a/src/main/java/edu/harvard/iq/dataverse/makedatacount/DatasetExternalCitations.java
+++ b/src/main/java/edu/harvard/iq/dataverse/makedatacount/DatasetExternalCitations.java
@@ -7,14 +7,14 @@
 
 import edu.harvard.iq.dataverse.Dataset;
 import java.io.Serializable;
-import javax.persistence.Column;
-import javax.persistence.Entity;
-import javax.persistence.GeneratedValue;
-import javax.persistence.GenerationType;
-import javax.persistence.Id;
-import javax.persistence.JoinColumn;
-import javax.persistence.ManyToOne;
-import javax.validation.constraints.NotNull;
+import jakarta.persistence.Column;
+import jakarta.persistence.Entity;
+import jakarta.persistence.GeneratedValue;
+import jakarta.persistence.GenerationType;
+import jakarta.persistence.Id;
+import jakarta.persistence.JoinColumn;
+import jakarta.persistence.ManyToOne;
+import jakarta.validation.constraints.NotNull;
 
 /**
  *
diff --git a/src/main/java/edu/harvard/iq/dataverse/makedatacount/DatasetExternalCitationsServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/makedatacount/DatasetExternalCitationsServiceBean.java
index c05fc9b1a4e..50c24274bb2 100644
--- a/src/main/java/edu/harvard/iq/dataverse/makedatacount/DatasetExternalCitationsServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/makedatacount/DatasetExternalCitationsServiceBean.java
@@ -10,16 +10,16 @@
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.List;
-import javax.ejb.EJB;
-import javax.ejb.EJBException;
-import javax.ejb.Stateless;
-import javax.inject.Named;
-import javax.json.JsonArray;
-import javax.json.JsonObject;
-import javax.json.JsonValue;
-import javax.persistence.EntityManager;
-import javax.persistence.PersistenceContext;
-import javax.persistence.Query;
+import jakarta.ejb.EJB;
+import jakarta.ejb.EJBException;
+import jakarta.ejb.Stateless;
+import jakarta.inject.Named;
+import jakarta.json.JsonArray;
+import jakarta.json.JsonObject;
+import jakarta.json.JsonValue;
+import jakarta.persistence.EntityManager;
+import jakarta.persistence.PersistenceContext;
+import jakarta.persistence.Query;
 
 /**
  *
diff --git a/src/main/java/edu/harvard/iq/dataverse/makedatacount/DatasetMetrics.java b/src/main/java/edu/harvard/iq/dataverse/makedatacount/DatasetMetrics.java
index fe0565c3ff8..ac3dff356eb 100644
--- a/src/main/java/edu/harvard/iq/dataverse/makedatacount/DatasetMetrics.java
+++ b/src/main/java/edu/harvard/iq/dataverse/makedatacount/DatasetMetrics.java
@@ -3,15 +3,15 @@
 import edu.harvard.iq.dataverse.Dataset;
 import java.io.Serializable;
 
-import javax.persistence.Column;
-import javax.persistence.Entity;
-import javax.persistence.GeneratedValue;
-import javax.persistence.GenerationType;
-import javax.persistence.Id;
-import javax.persistence.JoinColumn;
-import javax.persistence.ManyToOne;
-import javax.persistence.Transient;
-import javax.validation.constraints.NotNull;
+import jakarta.persistence.Column;
+import jakarta.persistence.Entity;
+import jakarta.persistence.GeneratedValue;
+import jakarta.persistence.GenerationType;
+import jakarta.persistence.Id;
+import jakarta.persistence.JoinColumn;
+import jakarta.persistence.ManyToOne;
+import jakarta.persistence.Transient;
+import jakarta.validation.constraints.NotNull;
 
 /**
  * Cached versions of views, downloads, and citations to show in the UI and API.
diff --git a/src/main/java/edu/harvard/iq/dataverse/makedatacount/DatasetMetricsServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/makedatacount/DatasetMetricsServiceBean.java
index 39afdf318ad..0925c164bf4 100644
--- a/src/main/java/edu/harvard/iq/dataverse/makedatacount/DatasetMetricsServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/makedatacount/DatasetMetricsServiceBean.java
@@ -5,26 +5,23 @@
 import edu.harvard.iq.dataverse.DatasetServiceBean;
 import java.io.StringReader;
 import java.math.BigDecimal;
-import java.sql.Timestamp;
 import java.util.ArrayList;
-import java.util.Date;
 import java.util.List;
 import java.util.ListIterator;
 import java.util.Set;
-import java.util.concurrent.Future;
 import java.util.logging.Logger;
-import javax.ejb.EJB;
-import javax.ejb.EJBException;
-import javax.ejb.Stateless;
-import javax.inject.Named;
-import javax.json.Json;
-import javax.json.JsonArray;
-import javax.json.JsonObject;
-import javax.json.JsonReader;
-import javax.json.JsonValue;
-import javax.persistence.EntityManager;
-import javax.persistence.PersistenceContext;
-import javax.persistence.Query;
+import jakarta.ejb.EJB;
+import jakarta.ejb.EJBException;
+import jakarta.ejb.Stateless;
+import jakarta.inject.Named;
+import jakarta.json.Json;
+import jakarta.json.JsonArray;
+import jakarta.json.JsonObject;
+import jakarta.json.JsonReader;
+import jakarta.json.JsonValue;
+import jakarta.persistence.EntityManager;
+import jakarta.persistence.PersistenceContext;
+import jakarta.persistence.Query;
 
 /**
  *
diff --git a/src/main/java/edu/harvard/iq/dataverse/makedatacount/MakeDataCountLoggingServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/makedatacount/MakeDataCountLoggingServiceBean.java
index e1a635a9d6c..5edf2fde0c3 100644
--- a/src/main/java/edu/harvard/iq/dataverse/makedatacount/MakeDataCountLoggingServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/makedatacount/MakeDataCountLoggingServiceBean.java
@@ -16,14 +16,14 @@
 import java.text.SimpleDateFormat;
 import java.util.Date;
 import java.util.TimeZone;
-import javax.ejb.EJB;
-import javax.enterprise.context.RequestScoped;
-import javax.faces.context.FacesContext;
-import javax.inject.Named;
-import javax.servlet.http.HttpServletRequest;
-import javax.servlet.http.HttpSession;
-import javax.ws.rs.core.HttpHeaders;
-import javax.ws.rs.core.UriInfo;
+import jakarta.ejb.EJB;
+import jakarta.enterprise.context.RequestScoped;
+import jakarta.faces.context.FacesContext;
+import jakarta.inject.Named;
+import jakarta.servlet.http.HttpServletRequest;
+import jakarta.servlet.http.HttpSession;
+import jakarta.ws.rs.core.HttpHeaders;
+import jakarta.ws.rs.core.UriInfo;
 
 /**
  *
diff --git a/src/main/java/edu/harvard/iq/dataverse/makedatacount/MakeDataCountUtil.java b/src/main/java/edu/harvard/iq/dataverse/makedatacount/MakeDataCountUtil.java
index f3d45642083..8f32750f090 100644
--- a/src/main/java/edu/harvard/iq/dataverse/makedatacount/MakeDataCountUtil.java
+++ b/src/main/java/edu/harvard/iq/dataverse/makedatacount/MakeDataCountUtil.java
@@ -1,17 +1,16 @@
 package edu.harvard.iq.dataverse.makedatacount;
 
 import java.util.ArrayList;
-import java.util.Arrays;
 import java.util.HashSet;
 import java.util.List;
 import java.util.Set;
 import java.util.stream.Collectors;
 import java.util.stream.Stream;
 
-import javax.json.JsonArray;
-import javax.json.JsonObject;
-import javax.json.JsonValue;
-import javax.persistence.Transient;
+import jakarta.json.JsonArray;
+import jakarta.json.JsonObject;
+import jakarta.json.JsonValue;
+import jakarta.persistence.Transient;
 
 /**
  * See doc/sphinx-guides/source/admin/make-data-count.rst for user facing docs
diff --git a/src/main/java/edu/harvard/iq/dataverse/metadataimport/ForeignMetadataImportServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/metadataimport/ForeignMetadataImportServiceBean.java
index 88af8478a8f..33f8277919a 100644
--- a/src/main/java/edu/harvard/iq/dataverse/metadataimport/ForeignMetadataImportServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/metadataimport/ForeignMetadataImportServiceBean.java
@@ -2,7 +2,6 @@
 package edu.harvard.iq.dataverse.metadataimport;
 
 
-import edu.harvard.iq.dataverse.ControlledVocabularyValue;
 import edu.harvard.iq.dataverse.DatasetVersion;
 import edu.harvard.iq.dataverse.DatasetField;
 import edu.harvard.iq.dataverse.DatasetFieldCompoundValue;
@@ -17,13 +16,13 @@
 import java.io.IOException;
 import java.util.logging.Logger;
 import java.io.StringReader;
-import javax.ejb.Stateless;
-import javax.inject.Named;
-import javax.ejb.EJB;
-import javax.ejb.EJBException;
-import javax.persistence.EntityManager;
-import javax.persistence.NoResultException;
-import javax.persistence.PersistenceContext;
+import jakarta.ejb.Stateless;
+import jakarta.inject.Named;
+import jakarta.ejb.EJB;
+import jakarta.ejb.EJBException;
+import jakarta.persistence.EntityManager;
+import jakarta.persistence.NoResultException;
+import jakarta.persistence.PersistenceContext;
 import javax.xml.stream.XMLStreamConstants;
 import javax.xml.stream.XMLStreamException;
 import javax.xml.stream.XMLStreamReader;
diff --git a/src/main/java/edu/harvard/iq/dataverse/metrics/MetricsServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/metrics/MetricsServiceBean.java
index ff5ffee8f85..065b42e5afe 100644
--- a/src/main/java/edu/harvard/iq/dataverse/metrics/MetricsServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/metrics/MetricsServiceBean.java
@@ -3,7 +3,6 @@
 import edu.harvard.iq.dataverse.DatasetVersion;
 import edu.harvard.iq.dataverse.Dataverse;
 import edu.harvard.iq.dataverse.Metric;
-import edu.harvard.iq.dataverse.makedatacount.DatasetMetrics;
 import edu.harvard.iq.dataverse.makedatacount.MakeDataCountUtil.MetricType;
 
 import static edu.harvard.iq.dataverse.metrics.MetricsUtil.*;
@@ -23,19 +22,19 @@
 import java.util.List;
 import java.util.logging.Level;
 import java.util.logging.Logger;
-import javax.ejb.EJB;
-import javax.ejb.Stateless;
-import javax.json.Json;
-import javax.json.JsonArray;
-import javax.json.JsonArrayBuilder;
-import javax.json.JsonObject;
-import javax.json.JsonObjectBuilder;
-import javax.persistence.EntityManager;
-import javax.persistence.NoResultException;
-import javax.persistence.NonUniqueResultException;
-import javax.persistence.PersistenceContext;
-import javax.persistence.Query;
-import javax.ws.rs.core.UriInfo;
+import jakarta.ejb.EJB;
+import jakarta.ejb.Stateless;
+import jakarta.json.Json;
+import jakarta.json.JsonArray;
+import jakarta.json.JsonArrayBuilder;
+import jakarta.json.JsonObject;
+import jakarta.json.JsonObjectBuilder;
+import jakarta.persistence.EntityManager;
+import jakarta.persistence.NoResultException;
+import jakarta.persistence.NonUniqueResultException;
+import jakarta.persistence.PersistenceContext;
+import jakarta.persistence.Query;
+import jakarta.ws.rs.core.UriInfo;
 
 @Stateless
 public class MetricsServiceBean implements Serializable {
@@ -383,7 +382,7 @@ public JsonArray filesByType(Dataverse d) {
                 jab.add(stats);
             }
 
-        } catch (javax.persistence.NoResultException nr) {
+        } catch (NoResultException nr) {
             // do nothing
         }
         return jab.build();
@@ -519,7 +518,7 @@ public JsonArray fileDownloads(String yyyymm, Dataverse d, boolean uniqueCounts)
                 job.add(MetricsUtil.COUNT, (long) result[2]);
                 jab.add(job);
             }
-        } catch (javax.persistence.NoResultException nr) {
+        } catch (NoResultException nr) {
             // do nothing
         }
         return jab.build();
@@ -558,7 +557,7 @@ public JsonArray uniqueDatasetDownloads(String yyyymm, Dataverse d) {
                 jab.add(job);
             }
 
-        } catch (javax.persistence.NoResultException nr) {
+        } catch (NoResultException nr) {
             // do nothing
         }
         return jab.build();
@@ -718,7 +717,7 @@ public Metric getMetric(String name, String dataLocation, String dayString, Data
         Metric metric = null;
         try {
             metric = (Metric) query.getSingleResult();
-        } catch (javax.persistence.NoResultException nr) {
+        } catch (NoResultException nr) {
             // do nothing
             logger.fine("No result");
         } catch (NonUniqueResultException nur) {
diff --git a/src/main/java/edu/harvard/iq/dataverse/metrics/MetricsUtil.java b/src/main/java/edu/harvard/iq/dataverse/metrics/MetricsUtil.java
index 72d8f5402bb..74bb53e1191 100644
--- a/src/main/java/edu/harvard/iq/dataverse/metrics/MetricsUtil.java
+++ b/src/main/java/edu/harvard/iq/dataverse/metrics/MetricsUtil.java
@@ -6,21 +6,19 @@
 import java.time.LocalDate;
 import java.time.YearMonth;
 import java.time.format.DateTimeFormatter;
-import java.time.format.DateTimeFormatterBuilder;
 import java.time.format.DateTimeParseException;
-import java.time.temporal.ChronoField;
 import java.util.ArrayList;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
 import java.util.logging.Logger;
-import javax.json.Json;
-import javax.json.JsonArray;
-import javax.json.JsonArrayBuilder;
-import javax.json.JsonObject;
-import javax.json.JsonObjectBuilder;
-import javax.json.JsonReader;
-import javax.ws.rs.BadRequestException;
+import jakarta.json.Json;
+import jakarta.json.JsonArray;
+import jakarta.json.JsonArrayBuilder;
+import jakarta.json.JsonObject;
+import jakarta.json.JsonObjectBuilder;
+import jakarta.json.JsonReader;
+import jakarta.ws.rs.BadRequestException;
 
 public class MetricsUtil {
 
diff --git a/src/main/java/edu/harvard/iq/dataverse/mydata/DataRetrieverAPI.java b/src/main/java/edu/harvard/iq/dataverse/mydata/DataRetrieverAPI.java
index d244021d01a..e9898031343 100644
--- a/src/main/java/edu/harvard/iq/dataverse/mydata/DataRetrieverAPI.java
+++ b/src/main/java/edu/harvard/iq/dataverse/mydata/DataRetrieverAPI.java
@@ -25,25 +25,23 @@
 import edu.harvard.iq.dataverse.search.SearchException;
 import edu.harvard.iq.dataverse.search.SearchFields;
 import edu.harvard.iq.dataverse.search.SortBy;
-import java.math.BigDecimal;
-import java.util.Arrays;
+
 import java.util.List;
 import java.util.Map;
-import java.util.Random;
 import java.util.logging.Logger;
 import java.util.Locale;
-import javax.ejb.EJB;
-import javax.inject.Inject;
-import javax.json.Json;
-import javax.json.JsonArrayBuilder;
-import javax.json.JsonObjectBuilder;
-import javax.ws.rs.GET;
-import javax.ws.rs.Path;
-import javax.ws.rs.Produces;
-import javax.ws.rs.QueryParam;
-import javax.ws.rs.DefaultValue;
-import javax.ws.rs.container.ContainerRequestContext;
-import javax.ws.rs.core.Context;
+import jakarta.ejb.EJB;
+import jakarta.inject.Inject;
+import jakarta.json.Json;
+import jakarta.json.JsonArrayBuilder;
+import jakarta.json.JsonObjectBuilder;
+import jakarta.ws.rs.GET;
+import jakarta.ws.rs.Path;
+import jakarta.ws.rs.Produces;
+import jakarta.ws.rs.QueryParam;
+import jakarta.ws.rs.DefaultValue;
+import jakarta.ws.rs.container.ContainerRequestContext;
+import jakarta.ws.rs.core.Context;
 
 import edu.harvard.iq.dataverse.util.BundleUtil;
 import org.apache.commons.lang3.StringUtils;
diff --git a/src/main/java/edu/harvard/iq/dataverse/mydata/MyDataFilterParams.java b/src/main/java/edu/harvard/iq/dataverse/mydata/MyDataFilterParams.java
index c378034f951..2ab248fcc0b 100644
--- a/src/main/java/edu/harvard/iq/dataverse/mydata/MyDataFilterParams.java
+++ b/src/main/java/edu/harvard/iq/dataverse/mydata/MyDataFilterParams.java
@@ -17,9 +17,9 @@
 import java.util.HashMap;
 import java.util.List;
 import java.util.logging.Logger;
-import javax.json.Json;
-import javax.json.JsonArrayBuilder;
-import javax.json.JsonObjectBuilder;
+import jakarta.json.Json;
+import jakarta.json.JsonArrayBuilder;
+import jakarta.json.JsonObjectBuilder;
 import org.apache.commons.lang3.StringUtils;
 
 /**
diff --git a/src/main/java/edu/harvard/iq/dataverse/mydata/MyDataFinder.java b/src/main/java/edu/harvard/iq/dataverse/mydata/MyDataFinder.java
index 6acdfd9cdde..917884f3549 100644
--- a/src/main/java/edu/harvard/iq/dataverse/mydata/MyDataFinder.java
+++ b/src/main/java/edu/harvard/iq/dataverse/mydata/MyDataFinder.java
@@ -18,9 +18,9 @@
 import java.util.Map;
 import java.util.Set;
 import java.util.logging.Logger;
-import javax.json.Json;
-import javax.json.JsonArrayBuilder;
-import javax.json.JsonObjectBuilder;
+import jakarta.json.Json;
+import jakarta.json.JsonArrayBuilder;
+import jakarta.json.JsonObjectBuilder;
 import org.apache.commons.lang3.StringUtils;
 
 /**
diff --git a/src/main/java/edu/harvard/iq/dataverse/mydata/MyDataPage.java b/src/main/java/edu/harvard/iq/dataverse/mydata/MyDataPage.java
index 2567f8b2774..3ae64d9d760 100644
--- a/src/main/java/edu/harvard/iq/dataverse/mydata/MyDataPage.java
+++ b/src/main/java/edu/harvard/iq/dataverse/mydata/MyDataPage.java
@@ -18,12 +18,12 @@
 import java.util.Arrays;
 import java.util.List;
 import java.util.logging.Logger;
-import javax.ejb.EJB;
-import javax.faces.context.FacesContext;
-import javax.faces.view.ViewScoped;
-import javax.inject.Inject;
-import javax.inject.Named;
-import javax.servlet.http.HttpServletRequest;
+import jakarta.ejb.EJB;
+import jakarta.faces.context.FacesContext;
+import jakarta.faces.view.ViewScoped;
+import jakarta.inject.Inject;
+import jakarta.inject.Named;
+import jakarta.servlet.http.HttpServletRequest;
 
 /*
  * To change this license header, choose License Headers in Project Properties.
diff --git a/src/main/java/edu/harvard/iq/dataverse/mydata/Pager.java b/src/main/java/edu/harvard/iq/dataverse/mydata/Pager.java
index 4bf13e04284..096974b9d72 100644
--- a/src/main/java/edu/harvard/iq/dataverse/mydata/Pager.java
+++ b/src/main/java/edu/harvard/iq/dataverse/mydata/Pager.java
@@ -15,9 +15,9 @@
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.List;
-import javax.json.Json;
-import javax.json.JsonArrayBuilder;
-import javax.json.JsonObjectBuilder;
+import jakarta.json.Json;
+import jakarta.json.JsonArrayBuilder;
+import jakarta.json.JsonObjectBuilder;
 
 /**
  *
diff --git a/src/main/java/edu/harvard/iq/dataverse/mydata/RolePermissionHelperPage.java b/src/main/java/edu/harvard/iq/dataverse/mydata/RolePermissionHelperPage.java
index 4c596d1bb84..dcb76f42acb 100644
--- a/src/main/java/edu/harvard/iq/dataverse/mydata/RolePermissionHelperPage.java
+++ b/src/main/java/edu/harvard/iq/dataverse/mydata/RolePermissionHelperPage.java
@@ -3,16 +3,15 @@
 import edu.harvard.iq.dataverse.DatasetPage;
 import edu.harvard.iq.dataverse.DataverseRoleServiceBean;
 import edu.harvard.iq.dataverse.DataverseSession;
-import edu.harvard.iq.dataverse.DvObject;
 import edu.harvard.iq.dataverse.RoleAssigneeServiceBean;
 import edu.harvard.iq.dataverse.authorization.DataverseRole;
 import edu.harvard.iq.dataverse.authorization.DataverseRolePermissionHelper;
 import java.util.List;
 import java.util.logging.Logger;
-import javax.ejb.EJB;
-import javax.faces.view.ViewScoped;
-import javax.inject.Inject;
-import javax.inject.Named;
+import jakarta.ejb.EJB;
+import jakarta.faces.view.ViewScoped;
+import jakarta.inject.Inject;
+import jakarta.inject.Named;
 
 /*
  * To change this license header, choose License Headers in Project Properties.
diff --git a/src/main/java/edu/harvard/iq/dataverse/mydata/RoleTagRetriever.java b/src/main/java/edu/harvard/iq/dataverse/mydata/RoleTagRetriever.java
index cf7380a9a9b..e328a50e962 100644
--- a/src/main/java/edu/harvard/iq/dataverse/mydata/RoleTagRetriever.java
+++ b/src/main/java/edu/harvard/iq/dataverse/mydata/RoleTagRetriever.java
@@ -23,9 +23,8 @@
 import java.util.Set;
 import java.util.logging.Level;
 import java.util.logging.Logger;
-import javax.json.Json;
-import javax.json.JsonArrayBuilder;
-import org.apache.commons.lang3.StringUtils;
+import jakarta.json.Json;
+import jakarta.json.JsonArrayBuilder;
 
 /**
  * Input:  dvObject id, parent Id, and dvObject type (from Solr)
diff --git a/src/main/java/edu/harvard/iq/dataverse/passwordreset/PasswordResetData.java b/src/main/java/edu/harvard/iq/dataverse/passwordreset/PasswordResetData.java
index a3150161c52..c078860ad8e 100644
--- a/src/main/java/edu/harvard/iq/dataverse/passwordreset/PasswordResetData.java
+++ b/src/main/java/edu/harvard/iq/dataverse/passwordreset/PasswordResetData.java
@@ -6,19 +6,19 @@
 import java.sql.Timestamp;
 import java.util.Date;
 import java.util.UUID;
-import javax.persistence.Column;
-import javax.persistence.Entity;
-import javax.persistence.EnumType;
-import javax.persistence.Enumerated;
-import javax.persistence.GeneratedValue;
-import javax.persistence.GenerationType;
-import javax.persistence.Id;
-import javax.persistence.Index;
-import javax.persistence.JoinColumn;
-import javax.persistence.NamedQueries;
-import javax.persistence.NamedQuery;
-import javax.persistence.OneToOne;
-import javax.persistence.Table;
+import jakarta.persistence.Column;
+import jakarta.persistence.Entity;
+import jakarta.persistence.EnumType;
+import jakarta.persistence.Enumerated;
+import jakarta.persistence.GeneratedValue;
+import jakarta.persistence.GenerationType;
+import jakarta.persistence.Id;
+import jakarta.persistence.Index;
+import jakarta.persistence.JoinColumn;
+import jakarta.persistence.NamedQueries;
+import jakarta.persistence.NamedQuery;
+import jakarta.persistence.OneToOne;
+import jakarta.persistence.Table;
 
 @Table(indexes = {@Index(columnList="token")
 		, @Index(columnList="builtinuser_id")})
diff --git a/src/main/java/edu/harvard/iq/dataverse/passwordreset/PasswordResetPage.java b/src/main/java/edu/harvard/iq/dataverse/passwordreset/PasswordResetPage.java
index b9eabf45159..b19721d56bb 100644
--- a/src/main/java/edu/harvard/iq/dataverse/passwordreset/PasswordResetPage.java
+++ b/src/main/java/edu/harvard/iq/dataverse/passwordreset/PasswordResetPage.java
@@ -16,19 +16,19 @@
 import edu.harvard.iq.dataverse.util.SystemConfig;
 import java.util.logging.Level;
 import java.util.logging.Logger;
-import javax.ejb.EJB;
-import javax.faces.application.FacesMessage;
-import javax.faces.context.FacesContext;
-import javax.faces.view.ViewScoped;
-import javax.inject.Inject;
-import javax.inject.Named;
+import jakarta.ejb.EJB;
+import jakarta.faces.application.FacesMessage;
+import jakarta.faces.context.FacesContext;
+import jakarta.faces.view.ViewScoped;
+import jakarta.inject.Inject;
+import jakarta.inject.Named;
 
 import edu.harvard.iq.dataverse.validation.PasswordValidatorServiceBean;
 import java.util.Arrays;
 import java.util.Date;
 import java.util.List;
-import javax.faces.component.UIComponent;
-import javax.faces.component.UIInput;
+import jakarta.faces.component.UIComponent;
+import jakarta.faces.component.UIInput;
 import org.apache.commons.lang3.StringUtils;
 import org.hibernate.validator.constraints.NotBlank;
 
diff --git a/src/main/java/edu/harvard/iq/dataverse/passwordreset/PasswordResetServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/passwordreset/PasswordResetServiceBean.java
index c8db23985d8..5d1c167d2a5 100644
--- a/src/main/java/edu/harvard/iq/dataverse/passwordreset/PasswordResetServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/passwordreset/PasswordResetServiceBean.java
@@ -10,18 +10,17 @@
 import edu.harvard.iq.dataverse.util.SystemConfig;
 
 import java.text.MessageFormat;
-import java.util.Date;
 import java.util.List;
 import java.util.logging.Level;
 import java.util.logging.Logger;
-import javax.ejb.EJB;
-import javax.ejb.Stateless;
-import javax.inject.Named;
-import javax.persistence.EntityManager;
-import javax.persistence.NoResultException;
-import javax.persistence.NonUniqueResultException;
-import javax.persistence.PersistenceContext;
-import javax.persistence.TypedQuery;
+import jakarta.ejb.EJB;
+import jakarta.ejb.Stateless;
+import jakarta.inject.Named;
+import jakarta.persistence.EntityManager;
+import jakarta.persistence.NoResultException;
+import jakarta.persistence.NonUniqueResultException;
+import jakarta.persistence.PersistenceContext;
+import jakarta.persistence.TypedQuery;
 import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser;
 
 @Stateless
diff --git a/src/main/java/edu/harvard/iq/dataverse/pidproviders/FakePidProviderServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/pidproviders/FakePidProviderServiceBean.java
index 54d64710511..3bd9d9dd022 100644
--- a/src/main/java/edu/harvard/iq/dataverse/pidproviders/FakePidProviderServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/pidproviders/FakePidProviderServiceBean.java
@@ -3,13 +3,13 @@
 import edu.harvard.iq.dataverse.DOIServiceBean;
 import edu.harvard.iq.dataverse.DvObject;
 import edu.harvard.iq.dataverse.GlobalId;
-import java.util.ArrayList;
+
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
 import java.util.logging.Logger;
 
-import javax.ejb.Stateless;
+import jakarta.ejb.Stateless;
 
 @Stateless
 public class FakePidProviderServiceBean extends DOIServiceBean {
diff --git a/src/main/java/edu/harvard/iq/dataverse/pidproviders/PermaLinkPidProviderServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/pidproviders/PermaLinkPidProviderServiceBean.java
index f387188b690..d145a7ec106 100644
--- a/src/main/java/edu/harvard/iq/dataverse/pidproviders/PermaLinkPidProviderServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/pidproviders/PermaLinkPidProviderServiceBean.java
@@ -4,21 +4,17 @@
 import edu.harvard.iq.dataverse.DvObject;
 import edu.harvard.iq.dataverse.GlobalId;
 import edu.harvard.iq.dataverse.GlobalIdServiceBean;
-import edu.harvard.iq.dataverse.engine.command.impl.CreateNewDatasetCommand;
 import edu.harvard.iq.dataverse.settings.JvmSettings;
 import edu.harvard.iq.dataverse.settings.SettingsServiceBean.Key;
 import edu.harvard.iq.dataverse.util.SystemConfig;
 
-import java.lang.StackWalker.StackFrame;
-import java.util.ArrayList;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
 import java.util.logging.Logger;
-import java.util.stream.Stream;
 
-import javax.annotation.PostConstruct;
-import javax.ejb.Stateless;
+import jakarta.annotation.PostConstruct;
+import jakarta.ejb.Stateless;
 
 /**
  * PermaLink provider
diff --git a/src/main/java/edu/harvard/iq/dataverse/pidproviders/PidHelper.java b/src/main/java/edu/harvard/iq/dataverse/pidproviders/PidHelper.java
index 478f5d6c2c4..5bc855a9593 100644
--- a/src/main/java/edu/harvard/iq/dataverse/pidproviders/PidHelper.java
+++ b/src/main/java/edu/harvard/iq/dataverse/pidproviders/PidHelper.java
@@ -1,10 +1,10 @@
 package edu.harvard.iq.dataverse.pidproviders;
 
 import java.util.Arrays;
-import javax.annotation.PostConstruct;
-import javax.ejb.EJB;
-import javax.ejb.Singleton;
-import javax.ejb.Startup;
+import jakarta.annotation.PostConstruct;
+import jakarta.ejb.EJB;
+import jakarta.ejb.Singleton;
+import jakarta.ejb.Startup;
 
 import edu.harvard.iq.dataverse.DOIDataCiteServiceBean;
 import edu.harvard.iq.dataverse.DOIEZIdServiceBean;
diff --git a/src/main/java/edu/harvard/iq/dataverse/pidproviders/PidUtil.java b/src/main/java/edu/harvard/iq/dataverse/pidproviders/PidUtil.java
index 4db7d099a47..78305648f67 100644
--- a/src/main/java/edu/harvard/iq/dataverse/pidproviders/PidUtil.java
+++ b/src/main/java/edu/harvard/iq/dataverse/pidproviders/PidUtil.java
@@ -16,13 +16,13 @@
 import java.util.Map;
 import java.util.logging.Logger;
 
-import javax.json.Json;
-import javax.json.JsonObject;
-import javax.json.JsonObjectBuilder;
-import javax.ws.rs.BadRequestException;
-import javax.ws.rs.InternalServerErrorException;
-import javax.ws.rs.NotFoundException;
-import javax.ws.rs.ServiceUnavailableException;
+import jakarta.json.Json;
+import jakarta.json.JsonObject;
+import jakarta.json.JsonObjectBuilder;
+import jakarta.ws.rs.BadRequestException;
+import jakarta.ws.rs.InternalServerErrorException;
+import jakarta.ws.rs.NotFoundException;
+import jakarta.ws.rs.ServiceUnavailableException;
 
 public class PidUtil {
 
diff --git a/src/main/java/edu/harvard/iq/dataverse/pidproviders/UnmanagedDOIServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/pidproviders/UnmanagedDOIServiceBean.java
index 20f1051763f..f7e9372cc9b 100644
--- a/src/main/java/edu/harvard/iq/dataverse/pidproviders/UnmanagedDOIServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/pidproviders/UnmanagedDOIServiceBean.java
@@ -1,13 +1,12 @@
 package edu.harvard.iq.dataverse.pidproviders;
 
 import java.io.IOException;
-import java.util.ArrayList;
 import java.util.List;
 import java.util.Map;
 import java.util.logging.Logger;
 
-import javax.annotation.PostConstruct;
-import javax.ejb.Stateless;
+import jakarta.annotation.PostConstruct;
+import jakarta.ejb.Stateless;
 
 import org.apache.commons.httpclient.HttpException;
 import org.apache.commons.lang3.NotImplementedException;
diff --git a/src/main/java/edu/harvard/iq/dataverse/pidproviders/UnmanagedHandlenetServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/pidproviders/UnmanagedHandlenetServiceBean.java
index 8847a99bd20..c856c5363e0 100644
--- a/src/main/java/edu/harvard/iq/dataverse/pidproviders/UnmanagedHandlenetServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/pidproviders/UnmanagedHandlenetServiceBean.java
@@ -7,7 +7,7 @@
 import java.util.*;
 import java.util.logging.Level;
 import java.util.logging.Logger;
-import javax.ejb.Stateless;
+import jakarta.ejb.Stateless;
 import org.apache.commons.lang3.NotImplementedException;
 
 /** This class is just used to parse Handles that are not managed by any account configured in Dataverse
diff --git a/src/main/java/edu/harvard/iq/dataverse/privateurl/PrivateUrlPage.java b/src/main/java/edu/harvard/iq/dataverse/privateurl/PrivateUrlPage.java
index b0658f10b34..9af4bb6af9e 100644
--- a/src/main/java/edu/harvard/iq/dataverse/privateurl/PrivateUrlPage.java
+++ b/src/main/java/edu/harvard/iq/dataverse/privateurl/PrivateUrlPage.java
@@ -4,10 +4,10 @@
 import edu.harvard.iq.dataverse.authorization.users.PrivateUrlUser;
 import java.io.Serializable;
 import java.util.logging.Logger;
-import javax.ejb.EJB;
-import javax.faces.view.ViewScoped;
-import javax.inject.Inject;
-import javax.inject.Named;
+import jakarta.ejb.EJB;
+import jakarta.faces.view.ViewScoped;
+import jakarta.inject.Inject;
+import jakarta.inject.Named;
 
 /**
  * Backing bean for JSF page. Sets session to {@link PrivateUrlUser}. 
diff --git a/src/main/java/edu/harvard/iq/dataverse/privateurl/PrivateUrlServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/privateurl/PrivateUrlServiceBean.java
index 8eb0dfe4ebd..9e5879106e4 100644
--- a/src/main/java/edu/harvard/iq/dataverse/privateurl/PrivateUrlServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/privateurl/PrivateUrlServiceBean.java
@@ -8,14 +8,14 @@
 import edu.harvard.iq.dataverse.util.SystemConfig;
 import java.io.Serializable;
 import java.util.logging.Logger;
-import javax.ejb.EJB;
-import javax.ejb.Stateless;
-import javax.inject.Named;
-import javax.persistence.EntityManager;
-import javax.persistence.NoResultException;
-import javax.persistence.NonUniqueResultException;
-import javax.persistence.PersistenceContext;
-import javax.persistence.TypedQuery;
+import jakarta.ejb.EJB;
+import jakarta.ejb.Stateless;
+import jakarta.inject.Named;
+import jakarta.persistence.EntityManager;
+import jakarta.persistence.NoResultException;
+import jakarta.persistence.NonUniqueResultException;
+import jakarta.persistence.PersistenceContext;
+import jakarta.persistence.TypedQuery;
 
 /**
  *
diff --git a/src/main/java/edu/harvard/iq/dataverse/provenance/ProvEntityFileDataConverter.java b/src/main/java/edu/harvard/iq/dataverse/provenance/ProvEntityFileDataConverter.java
index 65b97b80eea..ba5aba1b69b 100644
--- a/src/main/java/edu/harvard/iq/dataverse/provenance/ProvEntityFileDataConverter.java
+++ b/src/main/java/edu/harvard/iq/dataverse/provenance/ProvEntityFileDataConverter.java
@@ -1,11 +1,11 @@
 package edu.harvard.iq.dataverse.provenance;
 
-import javax.enterprise.inject.spi.CDI;
-import javax.faces.component.UIComponent;
-import javax.faces.context.FacesContext;
-import javax.faces.convert.Converter;
-import javax.faces.convert.FacesConverter;
-import javax.inject.Inject;
+import jakarta.enterprise.inject.spi.CDI;
+import jakarta.faces.component.UIComponent;
+import jakarta.faces.context.FacesContext;
+import jakarta.faces.convert.Converter;
+import jakarta.faces.convert.FacesConverter;
+import jakarta.inject.Inject;
 
 /**
  *
diff --git a/src/main/java/edu/harvard/iq/dataverse/provenance/ProvInvestigator.java b/src/main/java/edu/harvard/iq/dataverse/provenance/ProvInvestigator.java
index a17e77f2a9e..c2eecb90d9e 100644
--- a/src/main/java/edu/harvard/iq/dataverse/provenance/ProvInvestigator.java
+++ b/src/main/java/edu/harvard/iq/dataverse/provenance/ProvInvestigator.java
@@ -5,12 +5,12 @@
 import com.google.gson.GsonBuilder;
 import com.google.gson.JsonElement;
 import com.google.gson.JsonParser;
-import edu.harvard.iq.dataverse.api.AbstractApiBean;
+
 import java.util.HashMap;
 import java.util.Map;
 import java.util.Set;
 import java.util.logging.Logger;
-import javax.json.JsonObject;
+import jakarta.json.JsonObject;
 import org.everit.json.schema.Schema;
 import org.everit.json.schema.ValidationException;
 import org.everit.json.schema.loader.SchemaLoader;
diff --git a/src/main/java/edu/harvard/iq/dataverse/provenance/ProvPopupFragmentBean.java b/src/main/java/edu/harvard/iq/dataverse/provenance/ProvPopupFragmentBean.java
index a4b7cdf8d4e..6e8a512902a 100644
--- a/src/main/java/edu/harvard/iq/dataverse/provenance/ProvPopupFragmentBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/provenance/ProvPopupFragmentBean.java
@@ -24,19 +24,19 @@
 import java.util.HashMap;
 import java.util.Map;
 import java.util.logging.Level;
-import javax.ejb.EJB;
-import javax.faces.view.ViewScoped;
-import javax.inject.Inject;
-import javax.inject.Named;
+import jakarta.ejb.EJB;
+import jakarta.faces.view.ViewScoped;
+import jakarta.inject.Inject;
+import jakarta.inject.Named;
 import org.apache.commons.io.IOUtils;
 import java.util.ArrayList;
 import java.util.HashSet;
 import java.util.List;
 import java.util.Set;
-import javax.faces.application.FacesMessage;
-import javax.faces.context.ExternalContext;
-import javax.faces.context.FacesContext;
-import javax.json.JsonObject;
+import jakarta.faces.application.FacesMessage;
+import jakarta.faces.context.ExternalContext;
+import jakarta.faces.context.FacesContext;
+import jakarta.json.JsonObject;
 import org.primefaces.model.file.UploadedFile;
 
 /**
diff --git a/src/main/java/edu/harvard/iq/dataverse/repositorystorageabstractionlayer/RepositoryStorageAbstractionLayerPage.java b/src/main/java/edu/harvard/iq/dataverse/repositorystorageabstractionlayer/RepositoryStorageAbstractionLayerPage.java
index 4d89a2842cd..c252d2e3330 100644
--- a/src/main/java/edu/harvard/iq/dataverse/repositorystorageabstractionlayer/RepositoryStorageAbstractionLayerPage.java
+++ b/src/main/java/edu/harvard/iq/dataverse/repositorystorageabstractionlayer/RepositoryStorageAbstractionLayerPage.java
@@ -6,10 +6,10 @@
 import edu.harvard.iq.dataverse.settings.SettingsServiceBean;
 import java.util.List;
 import java.util.logging.Logger;
-import javax.ejb.EJB;
-import javax.ejb.Stateless;
-import javax.inject.Named;
-import javax.json.JsonArray;
+import jakarta.ejb.EJB;
+import jakarta.ejb.Stateless;
+import jakarta.inject.Named;
+import jakarta.json.JsonArray;
 
 @Stateless
 @Named
diff --git a/src/main/java/edu/harvard/iq/dataverse/repositorystorageabstractionlayer/RepositoryStorageAbstractionLayerUtil.java b/src/main/java/edu/harvard/iq/dataverse/repositorystorageabstractionlayer/RepositoryStorageAbstractionLayerUtil.java
index ee52254d6f5..8501fba3ce0 100644
--- a/src/main/java/edu/harvard/iq/dataverse/repositorystorageabstractionlayer/RepositoryStorageAbstractionLayerUtil.java
+++ b/src/main/java/edu/harvard/iq/dataverse/repositorystorageabstractionlayer/RepositoryStorageAbstractionLayerUtil.java
@@ -3,15 +3,15 @@
 import edu.harvard.iq.dataverse.Dataset;
 import edu.harvard.iq.dataverse.locality.StorageSite;
 import edu.harvard.iq.dataverse.settings.SettingsServiceBean;
-import edu.harvard.iq.dataverse.util.SystemConfig;
+
 import java.io.File;
 import java.util.ArrayList;
 import java.util.List;
 import java.util.logging.Logger;
-import javax.json.Json;
-import javax.json.JsonArray;
-import javax.json.JsonArrayBuilder;
-import javax.json.JsonObject;
+import jakarta.json.Json;
+import jakarta.json.JsonArray;
+import jakarta.json.JsonArrayBuilder;
+import jakarta.json.JsonObject;
 
 public class RepositoryStorageAbstractionLayerUtil {
 
diff --git a/src/main/java/edu/harvard/iq/dataverse/search/AdvancedSearchPage.java b/src/main/java/edu/harvard/iq/dataverse/search/AdvancedSearchPage.java
index ef37569ac54..bc92959a5ac 100644
--- a/src/main/java/edu/harvard/iq/dataverse/search/AdvancedSearchPage.java
+++ b/src/main/java/edu/harvard/iq/dataverse/search/AdvancedSearchPage.java
@@ -14,20 +14,16 @@
 import java.io.UnsupportedEncodingException;
 import java.net.URLEncoder;
 import java.util.ArrayList;
-import java.util.Arrays;
 import java.util.Collection;
 import java.util.HashMap;
-import java.util.HashSet;
 import java.util.List;
 import java.util.Map;
-import java.util.Set;
-import java.util.StringTokenizer;
 import java.util.logging.Logger;
-import javax.ejb.EJB;
-import javax.faces.view.ViewScoped;
-import javax.inject.Inject;
-import javax.inject.Named;
-import javax.json.JsonObject;
+import jakarta.ejb.EJB;
+import jakarta.faces.view.ViewScoped;
+import jakarta.inject.Inject;
+import jakarta.inject.Named;
+import jakarta.json.JsonObject;
 
 import org.apache.commons.lang3.StringUtils;
 
diff --git a/src/main/java/edu/harvard/iq/dataverse/search/FacetLabel.java b/src/main/java/edu/harvard/iq/dataverse/search/FacetLabel.java
index f8bd0ea5a10..f97c1641479 100644
--- a/src/main/java/edu/harvard/iq/dataverse/search/FacetLabel.java
+++ b/src/main/java/edu/harvard/iq/dataverse/search/FacetLabel.java
@@ -1,6 +1,6 @@
 package edu.harvard.iq.dataverse.search;
 
-import javax.inject.Named;
+import jakarta.inject.Named;
 
 @Named
 public class FacetLabel implements Comparable<FacetLabel>{
diff --git a/src/main/java/edu/harvard/iq/dataverse/search/Highlight.java b/src/main/java/edu/harvard/iq/dataverse/search/Highlight.java
index 98a882c13ca..d40d8c362af 100644
--- a/src/main/java/edu/harvard/iq/dataverse/search/Highlight.java
+++ b/src/main/java/edu/harvard/iq/dataverse/search/Highlight.java
@@ -1,7 +1,7 @@
 package edu.harvard.iq.dataverse.search;
 
 import java.util.List;
-import javax.inject.Named;
+import jakarta.inject.Named;
 
 @Named
 public class Highlight {
diff --git a/src/main/java/edu/harvard/iq/dataverse/search/IndexAsync.java b/src/main/java/edu/harvard/iq/dataverse/search/IndexAsync.java
index a04ae934259..c9cf1cd9dc7 100644
--- a/src/main/java/edu/harvard/iq/dataverse/search/IndexAsync.java
+++ b/src/main/java/edu/harvard/iq/dataverse/search/IndexAsync.java
@@ -5,9 +5,9 @@
 import java.util.Collection;
 import java.util.logging.Level;
 import java.util.logging.Logger;
-import javax.ejb.Asynchronous;
-import javax.ejb.EJB;
-import javax.ejb.Stateless;
+import jakarta.ejb.Asynchronous;
+import jakarta.ejb.EJB;
+import jakarta.ejb.Stateless;
 
 @Stateless
 public class IndexAsync {
diff --git a/src/main/java/edu/harvard/iq/dataverse/search/IndexBatchServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/search/IndexBatchServiceBean.java
index 932f58d875d..0eeb681514c 100644
--- a/src/main/java/edu/harvard/iq/dataverse/search/IndexBatchServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/search/IndexBatchServiceBean.java
@@ -1,30 +1,27 @@
 package edu.harvard.iq.dataverse.search;
 
-import edu.harvard.iq.dataverse.Dataset;
 import edu.harvard.iq.dataverse.DatasetServiceBean;
 import edu.harvard.iq.dataverse.Dataverse;
 import edu.harvard.iq.dataverse.DataverseServiceBean;
 import edu.harvard.iq.dataverse.DvObjectServiceBean;
 import edu.harvard.iq.dataverse.util.SystemConfig;
 import java.io.IOException;
-import java.sql.Timestamp;
 import java.util.ArrayList;
-import java.util.Date;
 import java.util.List;
 import java.util.concurrent.Future;
 import java.util.logging.Level;
 import java.util.logging.Logger;
-import javax.ejb.AsyncResult;
-import javax.ejb.Asynchronous;
-import javax.ejb.EJB;
-import javax.ejb.Stateless;
-import javax.inject.Named;
-import javax.json.Json;
-import javax.json.JsonArrayBuilder;
-import javax.json.JsonObject;
-import javax.json.JsonObjectBuilder;
-import javax.persistence.EntityManager;
-import javax.persistence.PersistenceContext;
+import jakarta.ejb.AsyncResult;
+import jakarta.ejb.Asynchronous;
+import jakarta.ejb.EJB;
+import jakarta.ejb.Stateless;
+import jakarta.inject.Named;
+import jakarta.json.Json;
+import jakarta.json.JsonArrayBuilder;
+import jakarta.json.JsonObject;
+import jakarta.json.JsonObjectBuilder;
+import jakarta.persistence.EntityManager;
+import jakarta.persistence.PersistenceContext;
 import org.apache.solr.client.solrj.SolrServerException;
 
 @Named
diff --git a/src/main/java/edu/harvard/iq/dataverse/search/IndexServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/search/IndexServiceBean.java
index 669125090b8..d6d0be7a17b 100644
--- a/src/main/java/edu/harvard/iq/dataverse/search/IndexServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/search/IndexServiceBean.java
@@ -41,7 +41,6 @@
 import java.io.IOException;
 import java.io.InputStream;
 import java.sql.Timestamp;
-import java.text.NumberFormat;
 import java.text.SimpleDateFormat;
 import java.time.LocalDate;
 import java.util.ArrayList;
@@ -54,26 +53,25 @@
 import java.util.List;
 import java.util.Locale;
 import java.util.Map;
-import java.util.Optional;
 import java.util.Set;
 import java.util.concurrent.ConcurrentHashMap;
 import java.util.concurrent.Future;
 import java.util.function.Function;
 import java.util.logging.Logger;
 import java.util.stream.Collectors;
-import javax.annotation.PostConstruct;
-import javax.annotation.PreDestroy;
-import javax.ejb.AsyncResult;
-import javax.ejb.Asynchronous;
-import javax.ejb.EJB;
-import javax.ejb.EJBException;
-import javax.ejb.Stateless;
-import javax.ejb.TransactionAttribute;
-import static javax.ejb.TransactionAttributeType.REQUIRES_NEW;
-import javax.inject.Named;
-import javax.json.JsonObject;
-import javax.persistence.EntityManager;
-import javax.persistence.PersistenceContext;
+import jakarta.annotation.PostConstruct;
+import jakarta.annotation.PreDestroy;
+import jakarta.ejb.AsyncResult;
+import jakarta.ejb.Asynchronous;
+import jakarta.ejb.EJB;
+import jakarta.ejb.EJBException;
+import jakarta.ejb.Stateless;
+import jakarta.ejb.TransactionAttribute;
+import static jakarta.ejb.TransactionAttributeType.REQUIRES_NEW;
+import jakarta.inject.Named;
+import jakarta.json.JsonObject;
+import jakarta.persistence.EntityManager;
+import jakarta.persistence.PersistenceContext;
 
 import org.apache.commons.io.IOUtils;
 import org.apache.commons.lang3.StringUtils;
diff --git a/src/main/java/edu/harvard/iq/dataverse/search/SearchFilesServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/search/SearchFilesServiceBean.java
index 2bf8807e301..8caee7d16b4 100644
--- a/src/main/java/edu/harvard/iq/dataverse/search/SearchFilesServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/search/SearchFilesServiceBean.java
@@ -7,11 +7,11 @@
 import java.util.ArrayList;
 import java.util.List;
 import java.util.logging.Logger;
-import javax.ejb.EJB;
-import javax.ejb.Stateless;
-import javax.faces.context.FacesContext;
-import javax.inject.Named;
-import javax.servlet.http.HttpServletRequest;
+import jakarta.ejb.EJB;
+import jakarta.ejb.Stateless;
+import jakarta.faces.context.FacesContext;
+import jakarta.inject.Named;
+import jakarta.servlet.http.HttpServletRequest;
 
 @Named
 @Stateless
diff --git a/src/main/java/edu/harvard/iq/dataverse/search/SearchIncludeFragment.java b/src/main/java/edu/harvard/iq/dataverse/search/SearchIncludeFragment.java
index bfe397cf48c..2ce06541afa 100644
--- a/src/main/java/edu/harvard/iq/dataverse/search/SearchIncludeFragment.java
+++ b/src/main/java/edu/harvard/iq/dataverse/search/SearchIncludeFragment.java
@@ -8,8 +8,6 @@
 import edu.harvard.iq.dataverse.DatasetFieldType;
 import edu.harvard.iq.dataverse.DatasetFieldType.FieldType;
 import edu.harvard.iq.dataverse.DatasetServiceBean;
-import edu.harvard.iq.dataverse.DatasetVersion;
-import edu.harvard.iq.dataverse.DatasetVersion.VersionState;
 import edu.harvard.iq.dataverse.DatasetVersionServiceBean;
 import edu.harvard.iq.dataverse.Dataverse;
 import edu.harvard.iq.dataverse.DataverseFacet;
@@ -37,12 +35,12 @@
 import java.util.Optional;
 import java.util.Set;
 import java.util.logging.Logger;
-import javax.ejb.EJB;
-import javax.enterprise.context.RequestScoped;
-import javax.faces.context.FacesContext;
-import javax.inject.Inject;
-import javax.inject.Named;
-import javax.servlet.http.HttpServletRequest;
+import jakarta.ejb.EJB;
+import jakarta.enterprise.context.RequestScoped;
+import jakarta.faces.context.FacesContext;
+import jakarta.inject.Inject;
+import jakarta.inject.Named;
+import jakarta.servlet.http.HttpServletRequest;
 import org.apache.commons.lang3.StringUtils;
 
 
@@ -215,7 +213,7 @@ public String searchRedirect(String dataverseRedirectPage, Dataverse dataverseIn
             qParam = "&q=" + query;
         }
 
-        return widgetWrapper.wrapURL(dataverseRedirectPage + "?faces-redirect=true&q=" + qParam + optionalDataverseScope);
+        return widgetWrapper.wrapURL(dataverseRedirectPage + "?faces-redirect=true" + qParam + optionalDataverseScope);
 
     }
 
diff --git a/src/main/java/edu/harvard/iq/dataverse/search/SearchPermissionsServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/search/SearchPermissionsServiceBean.java
index e96164d442d..0dd2153f75b 100644
--- a/src/main/java/edu/harvard/iq/dataverse/search/SearchPermissionsServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/search/SearchPermissionsServiceBean.java
@@ -22,9 +22,9 @@
 import java.util.Map;
 import java.util.Set;
 import java.util.logging.Logger;
-import javax.ejb.EJB;
-import javax.ejb.Stateless;
-import javax.inject.Named;
+import jakarta.ejb.EJB;
+import jakarta.ejb.Stateless;
+import jakarta.inject.Named;
 
 /**
  * Determine whether items should be searchable.
diff --git a/src/main/java/edu/harvard/iq/dataverse/search/SearchServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/search/SearchServiceBean.java
index 9ac4b2406e8..44976d232c2 100644
--- a/src/main/java/edu/harvard/iq/dataverse/search/SearchServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/search/SearchServiceBean.java
@@ -20,7 +20,6 @@
 import edu.harvard.iq.dataverse.util.SystemConfig;
 import java.io.IOException;
 import java.lang.reflect.Field;
-import java.net.URL;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Calendar;
@@ -36,16 +35,16 @@
 import java.util.MissingResourceException;
 import java.util.logging.Level;
 import java.util.logging.Logger;
-import javax.ejb.EJB;
-import javax.ejb.EJBTransactionRolledbackException;
-import javax.ejb.Stateless;
-import javax.ejb.TransactionRolledbackLocalException;
-import javax.inject.Named;
-import javax.persistence.NoResultException;
+import jakarta.ejb.EJB;
+import jakarta.ejb.EJBTransactionRolledbackException;
+import jakarta.ejb.Stateless;
+import jakarta.ejb.TransactionRolledbackLocalException;
+import jakarta.inject.Named;
+import jakarta.persistence.NoResultException;
 import org.apache.solr.client.solrj.SolrQuery;
 import org.apache.solr.client.solrj.SolrQuery.SortClause;
 import org.apache.solr.client.solrj.SolrServerException;
-import org.apache.solr.client.solrj.impl.HttpSolrClient.RemoteSolrException;
+import org.apache.solr.client.solrj.impl.BaseHttpSolrClient.RemoteSolrException;
 import org.apache.solr.client.solrj.response.FacetField;
 import org.apache.solr.client.solrj.response.QueryResponse;
 import org.apache.solr.client.solrj.response.RangeFacet;
diff --git a/src/main/java/edu/harvard/iq/dataverse/search/SolrClientService.java b/src/main/java/edu/harvard/iq/dataverse/search/SolrClientService.java
index 0dc2fe08b54..b36130de7c8 100644
--- a/src/main/java/edu/harvard/iq/dataverse/search/SolrClientService.java
+++ b/src/main/java/edu/harvard/iq/dataverse/search/SolrClientService.java
@@ -10,11 +10,11 @@
 import org.apache.solr.client.solrj.SolrClient;
 import org.apache.solr.client.solrj.impl.HttpSolrClient;
 
-import javax.annotation.PostConstruct;
-import javax.annotation.PreDestroy;
-import javax.ejb.EJB;
-import javax.ejb.Singleton;
-import javax.inject.Named;
+import jakarta.annotation.PostConstruct;
+import jakarta.annotation.PreDestroy;
+import jakarta.ejb.EJB;
+import jakarta.ejb.Singleton;
+import jakarta.inject.Named;
 import java.io.IOException;
 import java.util.logging.Logger;
 
diff --git a/src/main/java/edu/harvard/iq/dataverse/search/SolrIndexServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/search/SolrIndexServiceBean.java
index 5856004ce53..04021eb75b6 100644
--- a/src/main/java/edu/harvard/iq/dataverse/search/SolrIndexServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/search/SolrIndexServiceBean.java
@@ -20,11 +20,11 @@
 import java.util.Set;
 import java.util.logging.Level;
 import java.util.logging.Logger;
-import javax.ejb.EJB;
-import javax.ejb.Stateless;
-import javax.inject.Named;
-import javax.json.Json;
-import javax.json.JsonObjectBuilder;
+import jakarta.ejb.EJB;
+import jakarta.ejb.Stateless;
+import jakarta.inject.Named;
+import jakarta.json.Json;
+import jakarta.json.JsonObjectBuilder;
 import org.apache.solr.client.solrj.SolrServerException;
 import org.apache.solr.client.solrj.response.UpdateResponse;
 import org.apache.solr.common.SolrInputDocument;
diff --git a/src/main/java/edu/harvard/iq/dataverse/search/SolrQueryResponse.java b/src/main/java/edu/harvard/iq/dataverse/search/SolrQueryResponse.java
index b499b80961e..893099ff08d 100644
--- a/src/main/java/edu/harvard/iq/dataverse/search/SolrQueryResponse.java
+++ b/src/main/java/edu/harvard/iq/dataverse/search/SolrQueryResponse.java
@@ -5,8 +5,8 @@
 import java.util.List;
 import java.util.Map;
 import java.util.logging.Logger;
-import javax.json.Json;
-import javax.json.JsonObjectBuilder;
+import jakarta.json.Json;
+import jakarta.json.JsonObjectBuilder;
 import org.apache.solr.client.solrj.SolrQuery;
 import org.apache.solr.client.solrj.response.FacetField;
 
diff --git a/src/main/java/edu/harvard/iq/dataverse/search/SolrSearchResult.java b/src/main/java/edu/harvard/iq/dataverse/search/SolrSearchResult.java
index b58c22b64c9..6ad7f9dbbf6 100644
--- a/src/main/java/edu/harvard/iq/dataverse/search/SolrSearchResult.java
+++ b/src/main/java/edu/harvard/iq/dataverse/search/SolrSearchResult.java
@@ -9,10 +9,10 @@
 import java.util.Map;
 import java.util.logging.Logger;
 
-import javax.json.Json;
-import javax.json.JsonArrayBuilder;
-import javax.json.JsonObject;
-import javax.json.JsonObjectBuilder;
+import jakarta.json.Json;
+import jakarta.json.JsonArrayBuilder;
+import jakarta.json.JsonObject;
+import jakarta.json.JsonObjectBuilder;
 
 import org.apache.commons.collections4.CollectionUtils;
 
diff --git a/src/main/java/edu/harvard/iq/dataverse/search/savedsearch/SavedSearch.java b/src/main/java/edu/harvard/iq/dataverse/search/savedsearch/SavedSearch.java
index 66bb63ed596..ff4a2e4aa96 100644
--- a/src/main/java/edu/harvard/iq/dataverse/search/savedsearch/SavedSearch.java
+++ b/src/main/java/edu/harvard/iq/dataverse/search/savedsearch/SavedSearch.java
@@ -5,16 +5,16 @@
 import java.io.Serializable;
 import java.util.ArrayList;
 import java.util.List;
-import javax.persistence.CascadeType;
-import javax.persistence.Column;
-import javax.persistence.Entity;
-import javax.persistence.GeneratedValue;
-import javax.persistence.GenerationType;
-import javax.persistence.Id;
-import javax.persistence.Index;
-import javax.persistence.JoinColumn;
-import javax.persistence.OneToMany;
-import javax.persistence.Table;
+import jakarta.persistence.CascadeType;
+import jakarta.persistence.Column;
+import jakarta.persistence.Entity;
+import jakarta.persistence.GeneratedValue;
+import jakarta.persistence.GenerationType;
+import jakarta.persistence.Id;
+import jakarta.persistence.Index;
+import jakarta.persistence.JoinColumn;
+import jakarta.persistence.OneToMany;
+import jakarta.persistence.Table;
 
 @Entity
 @Table(indexes = {@Index(columnList="definitionpoint_id")
diff --git a/src/main/java/edu/harvard/iq/dataverse/search/savedsearch/SavedSearchFilterQuery.java b/src/main/java/edu/harvard/iq/dataverse/search/savedsearch/SavedSearchFilterQuery.java
index f884a9529c9..7f51f9c8728 100644
--- a/src/main/java/edu/harvard/iq/dataverse/search/savedsearch/SavedSearchFilterQuery.java
+++ b/src/main/java/edu/harvard/iq/dataverse/search/savedsearch/SavedSearchFilterQuery.java
@@ -1,16 +1,16 @@
 package edu.harvard.iq.dataverse.search.savedsearch;
 
 import java.io.Serializable;
-import javax.persistence.CascadeType;
-import javax.persistence.Column;
-import javax.persistence.Entity;
-import javax.persistence.GeneratedValue;
-import javax.persistence.GenerationType;
-import javax.persistence.Id;
-import javax.persistence.Index;
-import javax.persistence.JoinColumn;
-import javax.persistence.ManyToOne;
-import javax.persistence.Table;
+import jakarta.persistence.CascadeType;
+import jakarta.persistence.Column;
+import jakarta.persistence.Entity;
+import jakarta.persistence.GeneratedValue;
+import jakarta.persistence.GenerationType;
+import jakarta.persistence.Id;
+import jakarta.persistence.Index;
+import jakarta.persistence.JoinColumn;
+import jakarta.persistence.ManyToOne;
+import jakarta.persistence.Table;
 
 @Entity
 @Table(indexes = {@Index(columnList="savedsearch_id")})
diff --git a/src/main/java/edu/harvard/iq/dataverse/search/savedsearch/SavedSearchServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/search/savedsearch/SavedSearchServiceBean.java
index 587e054dc4a..7fc2bdf79a3 100644
--- a/src/main/java/edu/harvard/iq/dataverse/search/savedsearch/SavedSearchServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/search/savedsearch/SavedSearchServiceBean.java
@@ -21,24 +21,23 @@
 import edu.harvard.iq.dataverse.search.SortBy;
 import edu.harvard.iq.dataverse.util.SystemConfig;
 import java.util.ArrayList;
-import java.util.Collections;
 import java.util.Date;
 import java.util.List;
 import java.util.logging.Level;
 import java.util.logging.Logger;
-import javax.ejb.EJB;
-import javax.ejb.Schedule;
-import javax.ejb.Stateless;
-import javax.inject.Named;
-import javax.json.Json;
-import javax.json.JsonArrayBuilder;
-import javax.json.JsonObjectBuilder;
-import javax.persistence.EntityManager;
-import javax.persistence.NoResultException;
-import javax.persistence.NonUniqueResultException;
-import javax.persistence.PersistenceContext;
-import javax.persistence.TypedQuery;
-import javax.servlet.http.HttpServletRequest;
+import jakarta.ejb.EJB;
+import jakarta.ejb.Schedule;
+import jakarta.ejb.Stateless;
+import jakarta.inject.Named;
+import jakarta.json.Json;
+import jakarta.json.JsonArrayBuilder;
+import jakarta.json.JsonObjectBuilder;
+import jakarta.persistence.EntityManager;
+import jakarta.persistence.NoResultException;
+import jakarta.persistence.NonUniqueResultException;
+import jakarta.persistence.PersistenceContext;
+import jakarta.persistence.TypedQuery;
+import jakarta.servlet.http.HttpServletRequest;
 
 @Stateless
 @Named
diff --git a/src/main/java/edu/harvard/iq/dataverse/settings/Setting.java b/src/main/java/edu/harvard/iq/dataverse/settings/Setting.java
index 160ed693eee..b1910a2fbb5 100644
--- a/src/main/java/edu/harvard/iq/dataverse/settings/Setting.java
+++ b/src/main/java/edu/harvard/iq/dataverse/settings/Setting.java
@@ -2,13 +2,13 @@
 
 import java.io.Serializable;
 import java.util.Objects;
-import javax.persistence.Column;
-import javax.persistence.Entity;
-import javax.persistence.Id;
-import javax.persistence.NamedQueries;
-import javax.persistence.NamedQuery;
-import javax.persistence.GeneratedValue;
-import javax.persistence.GenerationType;
+import jakarta.persistence.Column;
+import jakarta.persistence.Entity;
+import jakarta.persistence.Id;
+import jakarta.persistence.NamedQueries;
+import jakarta.persistence.NamedQuery;
+import jakarta.persistence.GeneratedValue;
+import jakarta.persistence.GenerationType;
 
 /**
  * A single value in the config of dataverse.
diff --git a/src/main/java/edu/harvard/iq/dataverse/settings/SettingsServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/settings/SettingsServiceBean.java
index ead0ac3bd39..2826df74ed1 100644
--- a/src/main/java/edu/harvard/iq/dataverse/settings/SettingsServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/settings/SettingsServiceBean.java
@@ -5,16 +5,16 @@
 import edu.harvard.iq.dataverse.api.ApiBlockingFilter;
 import edu.harvard.iq.dataverse.util.StringUtil;
 
-import javax.ejb.EJB;
-import javax.ejb.Stateless;
-import javax.inject.Named;
-import javax.json.Json;
-import javax.json.JsonArray;
-import javax.json.JsonObject;
-import javax.json.JsonReader;
-import javax.json.JsonValue;
-import javax.persistence.EntityManager;
-import javax.persistence.PersistenceContext;
+import jakarta.ejb.EJB;
+import jakarta.ejb.Stateless;
+import jakarta.inject.Named;
+import jakarta.json.Json;
+import jakarta.json.JsonArray;
+import jakarta.json.JsonObject;
+import jakarta.json.JsonReader;
+import jakarta.json.JsonValue;
+import jakarta.persistence.EntityManager;
+import jakarta.persistence.PersistenceContext;
 
 import org.json.JSONArray;
 import org.json.JSONException;
diff --git a/src/main/java/edu/harvard/iq/dataverse/sitemap/SiteMapServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/sitemap/SiteMapServiceBean.java
index 14db98e540e..a51acd1f54f 100644
--- a/src/main/java/edu/harvard/iq/dataverse/sitemap/SiteMapServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/sitemap/SiteMapServiceBean.java
@@ -3,8 +3,8 @@
 import edu.harvard.iq.dataverse.Dataset;
 import edu.harvard.iq.dataverse.Dataverse;
 import java.util.List;
-import javax.ejb.Asynchronous;
-import javax.ejb.Stateless;
+import jakarta.ejb.Asynchronous;
+import jakarta.ejb.Stateless;
 
 @Stateless
 public class SiteMapServiceBean {
diff --git a/src/main/java/edu/harvard/iq/dataverse/timer/DataverseTimerServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/timer/DataverseTimerServiceBean.java
index b132bff9429..6eb3a8df0bc 100644
--- a/src/main/java/edu/harvard/iq/dataverse/timer/DataverseTimerServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/timer/DataverseTimerServiceBean.java
@@ -25,17 +25,17 @@
 import java.util.Iterator;
 import java.util.logging.Level;
 import java.util.logging.Logger;
-import javax.annotation.PostConstruct;
-import javax.annotation.Resource;
-import javax.ejb.EJB;
-import javax.ejb.Singleton;
-import javax.ejb.Startup;
-import javax.ejb.Timeout;
-import javax.ejb.Timer;
-import javax.ejb.TimerConfig;
-import javax.ejb.TransactionAttribute;
-import javax.ejb.TransactionAttributeType;
-import javax.servlet.http.HttpServletRequest;
+import jakarta.annotation.PostConstruct;
+import jakarta.annotation.Resource;
+import jakarta.ejb.EJB;
+import jakarta.ejb.Singleton;
+import jakarta.ejb.Startup;
+import jakarta.ejb.Timeout;
+import jakarta.ejb.Timer;
+import jakarta.ejb.TimerConfig;
+import jakarta.ejb.TransactionAttribute;
+import jakarta.ejb.TransactionAttributeType;
+import jakarta.servlet.http.HttpServletRequest;
 
 
 /**
@@ -55,7 +55,7 @@ public class DataverseTimerServiceBean implements Serializable {
     private static final Logger logger = Logger.getLogger("edu.harvard.iq.dataverse.timer.DataverseTimerServiceBean");
     
     @Resource
-    javax.ejb.TimerService timerService;
+    jakarta.ejb.TimerService timerService;
     @EJB
     HarvesterServiceBean harvesterService;
     @EJB
@@ -109,7 +109,7 @@ public void createTimer(Date initialExpiration, long intervalDuration, Serializa
      */
     @Timeout
     @TransactionAttribute(TransactionAttributeType.NOT_SUPPORTED)
-    public void handleTimeout(javax.ejb.Timer timer) {
+    public void handleTimeout(jakarta.ejb.Timer timer) {
         // We have to put all the code in a try/catch block because
         // if an exception is thrown from this method, Glassfish will automatically
         // call the method a second time. (The minimum number of re-tries for a Timer method is 1)
diff --git a/src/main/java/edu/harvard/iq/dataverse/userdata/UserListResult.java b/src/main/java/edu/harvard/iq/dataverse/userdata/UserListResult.java
index 07937638607..5e7f532d2ac 100644
--- a/src/main/java/edu/harvard/iq/dataverse/userdata/UserListResult.java
+++ b/src/main/java/edu/harvard/iq/dataverse/userdata/UserListResult.java
@@ -7,14 +7,13 @@
 
 import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser;
 import edu.harvard.iq.dataverse.mydata.Pager;
-import edu.harvard.iq.dataverse.util.BundleUtil;
+
 import java.util.ArrayList;
 import java.util.List;
-import java.util.logging.Level;
 import java.util.logging.Logger;
-import javax.json.Json;
-import javax.json.JsonArrayBuilder;
-import javax.json.JsonObjectBuilder;
+import jakarta.json.Json;
+import jakarta.json.JsonArrayBuilder;
+import jakarta.json.JsonObjectBuilder;
 
 /**
  *
diff --git a/src/main/java/edu/harvard/iq/dataverse/util/BundleUtil.java b/src/main/java/edu/harvard/iq/dataverse/util/BundleUtil.java
index 26694c0ac6c..922e6ff5d28 100644
--- a/src/main/java/edu/harvard/iq/dataverse/util/BundleUtil.java
+++ b/src/main/java/edu/harvard/iq/dataverse/util/BundleUtil.java
@@ -12,7 +12,7 @@
 import java.util.logging.Logger;
 import java.util.Map;
 import java.util.HashMap;
-import javax.faces.context.FacesContext;
+import jakarta.faces.context.FacesContext;
 
 public class BundleUtil {
 
diff --git a/src/main/java/edu/harvard/iq/dataverse/util/ClockUtil.java b/src/main/java/edu/harvard/iq/dataverse/util/ClockUtil.java
index 9c1c89430d5..d51f70229a9 100644
--- a/src/main/java/edu/harvard/iq/dataverse/util/ClockUtil.java
+++ b/src/main/java/edu/harvard/iq/dataverse/util/ClockUtil.java
@@ -1,8 +1,8 @@
 package edu.harvard.iq.dataverse.util;
 
-import javax.enterprise.inject.Produces;
-import javax.inject.Qualifier;
-import javax.inject.Singleton;
+import jakarta.enterprise.inject.Produces;
+import jakarta.inject.Qualifier;
+import jakarta.inject.Singleton;
 import java.lang.annotation.ElementType;
 import java.lang.annotation.Retention;
 import java.lang.annotation.RetentionPolicy;
diff --git a/src/main/java/edu/harvard/iq/dataverse/util/ConstraintViolationUtil.java b/src/main/java/edu/harvard/iq/dataverse/util/ConstraintViolationUtil.java
index d2e59fac9f5..1910fde6489 100644
--- a/src/main/java/edu/harvard/iq/dataverse/util/ConstraintViolationUtil.java
+++ b/src/main/java/edu/harvard/iq/dataverse/util/ConstraintViolationUtil.java
@@ -5,8 +5,8 @@
  */
 package edu.harvard.iq.dataverse.util;
 
-import javax.validation.ConstraintViolation;
-import javax.validation.ConstraintViolationException;
+import jakarta.validation.ConstraintViolation;
+import jakarta.validation.ConstraintViolationException;
 
 /**
  *
diff --git a/src/main/java/edu/harvard/iq/dataverse/util/DataSourceProducer.java b/src/main/java/edu/harvard/iq/dataverse/util/DataSourceProducer.java
index 800c05ae6dc..62cd318706f 100644
--- a/src/main/java/edu/harvard/iq/dataverse/util/DataSourceProducer.java
+++ b/src/main/java/edu/harvard/iq/dataverse/util/DataSourceProducer.java
@@ -1,12 +1,12 @@
 package edu.harvard.iq.dataverse.util;
 
-import javax.annotation.Resource;
-import javax.annotation.sql.DataSourceDefinition;
-import javax.enterprise.inject.Produces;
-import javax.inject.Singleton;
+import jakarta.annotation.Resource;
+import jakarta.annotation.sql.DataSourceDefinition;
+import jakarta.enterprise.inject.Produces;
+import jakarta.inject.Singleton;
 import javax.sql.DataSource;
 
-// Find docs here: https://javaee.github.io/javaee-spec/javadocs/javax/annotation/sql/DataSourceDefinition.html
+// Find docs here: https://jakarta.ee/specifications/annotations/2.1/apidocs/jakarta.annotation/jakarta/annotation/sql/datasourcedefinition
 @Singleton
 @DataSourceDefinition(
         name = "java:app/jdbc/dataverse",
diff --git a/src/main/java/edu/harvard/iq/dataverse/util/DatasetFieldWalker.java b/src/main/java/edu/harvard/iq/dataverse/util/DatasetFieldWalker.java
index df97998d9e8..25032860d11 100644
--- a/src/main/java/edu/harvard/iq/dataverse/util/DatasetFieldWalker.java
+++ b/src/main/java/edu/harvard/iq/dataverse/util/DatasetFieldWalker.java
@@ -3,7 +3,6 @@
 import edu.harvard.iq.dataverse.ControlledVocabularyValue;
 import edu.harvard.iq.dataverse.DatasetField;
 import edu.harvard.iq.dataverse.DatasetFieldCompoundValue;
-import edu.harvard.iq.dataverse.DatasetFieldServiceBean;
 import edu.harvard.iq.dataverse.DatasetFieldType;
 import edu.harvard.iq.dataverse.DatasetFieldValue;
 import edu.harvard.iq.dataverse.settings.SettingsServiceBean;
@@ -12,12 +11,10 @@
 import java.util.Comparator;
 import java.util.List;
 import java.util.Map;
-import java.util.SortedSet;
-import java.util.TreeSet;
 import java.util.logging.Logger;
 
-import javax.json.Json;
-import javax.json.JsonObject;
+import jakarta.json.Json;
+import jakarta.json.JsonObject;
 
 /**
  * A means of iterating over {@link DatasetField}s, or a collection of them.
diff --git a/src/main/java/edu/harvard/iq/dataverse/util/EjbUtil.java b/src/main/java/edu/harvard/iq/dataverse/util/EjbUtil.java
index cf337b0a020..fd8c8fa4a9c 100644
--- a/src/main/java/edu/harvard/iq/dataverse/util/EjbUtil.java
+++ b/src/main/java/edu/harvard/iq/dataverse/util/EjbUtil.java
@@ -1,6 +1,6 @@
 package edu.harvard.iq.dataverse.util;
 
-import javax.ejb.EJBException;
+import jakarta.ejb.EJBException;
 
 public class EjbUtil {
 
diff --git a/src/main/java/edu/harvard/iq/dataverse/util/FileUtil.java b/src/main/java/edu/harvard/iq/dataverse/util/FileUtil.java
index 7137db9ca78..5f7643b3115 100644
--- a/src/main/java/edu/harvard/iq/dataverse/util/FileUtil.java
+++ b/src/main/java/edu/harvard/iq/dataverse/util/FileUtil.java
@@ -64,8 +64,6 @@
 import java.io.IOException;
 import java.io.InputStream;
 import java.io.OutputStream;
-import java.nio.channels.FileChannel;
-import java.nio.channels.WritableByteChannel;
 import java.nio.charset.Charset;
 import java.nio.file.Files;
 import java.nio.file.Path;
@@ -87,11 +85,11 @@
 import java.util.UUID;
 import java.util.logging.Level;
 import java.util.logging.Logger;
-import javax.activation.MimetypesFileTypeMap;
-import javax.ejb.EJBException;
-import javax.enterprise.inject.spi.CDI;
-import javax.json.JsonArray;
-import javax.json.JsonObject;
+import jakarta.activation.MimetypesFileTypeMap;
+import jakarta.ejb.EJBException;
+import jakarta.enterprise.inject.spi.CDI;
+import jakarta.json.JsonArray;
+import jakarta.json.JsonObject;
 import javax.xml.stream.XMLStreamConstants;
 import javax.xml.stream.XMLStreamException;
 import javax.xml.stream.XMLStreamReader;
diff --git a/src/main/java/edu/harvard/iq/dataverse/util/JsfHelper.java b/src/main/java/edu/harvard/iq/dataverse/util/JsfHelper.java
index 5b87b18573b..b02ac63cacd 100644
--- a/src/main/java/edu/harvard/iq/dataverse/util/JsfHelper.java
+++ b/src/main/java/edu/harvard/iq/dataverse/util/JsfHelper.java
@@ -1,11 +1,9 @@
 package edu.harvard.iq.dataverse.util;
 
-import java.util.Locale;
-import java.util.ResourceBundle;
 import java.util.logging.Level;
 import java.util.logging.Logger;
-import javax.faces.application.FacesMessage;
-import javax.faces.context.FacesContext;
+import jakarta.faces.application.FacesMessage;
+import jakarta.faces.context.FacesContext;
 
 /**
  * Utility class for common JSF tasks.
diff --git a/src/main/java/edu/harvard/iq/dataverse/util/MailUtil.java b/src/main/java/edu/harvard/iq/dataverse/util/MailUtil.java
index 72980c3451a..dcb6e078df6 100644
--- a/src/main/java/edu/harvard/iq/dataverse/util/MailUtil.java
+++ b/src/main/java/edu/harvard/iq/dataverse/util/MailUtil.java
@@ -8,8 +8,8 @@
 import java.util.Arrays;
 import java.util.List;
 import java.util.logging.Logger;
-import javax.mail.internet.AddressException;
-import javax.mail.internet.InternetAddress;
+import jakarta.mail.internet.AddressException;
+import jakarta.mail.internet.InternetAddress;
 
 public class MailUtil {
 
diff --git a/src/main/java/edu/harvard/iq/dataverse/util/PersonOrOrgUtil.java b/src/main/java/edu/harvard/iq/dataverse/util/PersonOrOrgUtil.java
index 431013771c5..f68957ad060 100644
--- a/src/main/java/edu/harvard/iq/dataverse/util/PersonOrOrgUtil.java
+++ b/src/main/java/edu/harvard/iq/dataverse/util/PersonOrOrgUtil.java
@@ -4,10 +4,10 @@
 import java.util.List;
 import java.util.logging.Logger;
 
-import javax.json.JsonArray;
-import javax.json.JsonObject;
-import javax.json.JsonObjectBuilder;
-import javax.json.JsonString;
+import jakarta.json.JsonArray;
+import jakarta.json.JsonObject;
+import jakarta.json.JsonObjectBuilder;
+import jakarta.json.JsonString;
 
 import edu.harvard.iq.dataverse.util.json.JsonUtil;
 import edu.harvard.iq.dataverse.util.json.NullSafeJsonBuilder;
diff --git a/src/main/java/edu/harvard/iq/dataverse/util/RequiredCheckboxValidator.java b/src/main/java/edu/harvard/iq/dataverse/util/RequiredCheckboxValidator.java
index 0221d45e4b0..fdac50ee58a 100644
--- a/src/main/java/edu/harvard/iq/dataverse/util/RequiredCheckboxValidator.java
+++ b/src/main/java/edu/harvard/iq/dataverse/util/RequiredCheckboxValidator.java
@@ -1,12 +1,12 @@
 package edu.harvard.iq.dataverse.util;
 
 import java.text.MessageFormat;
-import javax.faces.application.FacesMessage;
-import javax.faces.component.UIComponent;
-import javax.faces.component.UIInput;
-import javax.faces.context.FacesContext;
-import javax.faces.validator.Validator;
-import javax.faces.validator.ValidatorException;
+import jakarta.faces.application.FacesMessage;
+import jakarta.faces.component.UIComponent;
+import jakarta.faces.component.UIInput;
+import jakarta.faces.context.FacesContext;
+import jakarta.faces.validator.Validator;
+import jakarta.faces.validator.ValidatorException;
 
 /**
  * from http://balusc.blogspot.com/2008/09/validate-required-checkbox.html via
diff --git a/src/main/java/edu/harvard/iq/dataverse/util/SessionUtil.java b/src/main/java/edu/harvard/iq/dataverse/util/SessionUtil.java
index 0539ea40cb8..effa1980d70 100644
--- a/src/main/java/edu/harvard/iq/dataverse/util/SessionUtil.java
+++ b/src/main/java/edu/harvard/iq/dataverse/util/SessionUtil.java
@@ -4,8 +4,8 @@
 import java.util.HashMap;
 import java.util.Map.Entry;
 
-import javax.servlet.http.HttpServletRequest;
-import javax.servlet.http.HttpSession;
+import jakarta.servlet.http.HttpServletRequest;
+import jakarta.servlet.http.HttpSession;
 
 public class SessionUtil {
 
diff --git a/src/main/java/edu/harvard/iq/dataverse/util/SignpostingResources.java b/src/main/java/edu/harvard/iq/dataverse/util/SignpostingResources.java
index 54be3a8765f..2c9b7167059 100644
--- a/src/main/java/edu/harvard/iq/dataverse/util/SignpostingResources.java
+++ b/src/main/java/edu/harvard/iq/dataverse/util/SignpostingResources.java
@@ -16,9 +16,9 @@ Two configurable options allow changing the limit for the number of authors or d
 
 import edu.harvard.iq.dataverse.*;
 import edu.harvard.iq.dataverse.dataset.DatasetUtil;
-import javax.json.Json;
-import javax.json.JsonArrayBuilder;
-import javax.json.JsonObjectBuilder;
+import jakarta.json.Json;
+import jakarta.json.JsonArrayBuilder;
+import jakarta.json.JsonObjectBuilder;
 import java.util.ArrayList;
 import java.util.LinkedList;
 import java.util.List;
diff --git a/src/main/java/edu/harvard/iq/dataverse/util/SystemConfig.java b/src/main/java/edu/harvard/iq/dataverse/util/SystemConfig.java
index b9459e748bf..4fed3a05976 100644
--- a/src/main/java/edu/harvard/iq/dataverse/util/SystemConfig.java
+++ b/src/main/java/edu/harvard/iq/dataverse/util/SystemConfig.java
@@ -13,15 +13,15 @@
 import edu.harvard.iq.dataverse.validation.PasswordValidatorUtil;
 import org.passay.CharacterRule;
 
-import javax.ejb.EJB;
-import javax.ejb.Stateless;
-import javax.inject.Named;
-import javax.json.Json;
-import javax.json.JsonArray;
-import javax.json.JsonObject;
-import javax.json.JsonReader;
-import javax.json.JsonString;
-import javax.json.JsonValue;
+import jakarta.ejb.EJB;
+import jakarta.ejb.Stateless;
+import jakarta.inject.Named;
+import jakarta.json.Json;
+import jakarta.json.JsonArray;
+import jakarta.json.JsonObject;
+import jakarta.json.JsonReader;
+import jakarta.json.JsonString;
+import jakarta.json.JsonValue;
 import java.io.StringReader;
 import java.net.InetAddress;
 import java.net.UnknownHostException;
diff --git a/src/main/java/edu/harvard/iq/dataverse/util/URLTokenUtil.java b/src/main/java/edu/harvard/iq/dataverse/util/URLTokenUtil.java
index 4acf2d544e8..4ae76a7b8db 100644
--- a/src/main/java/edu/harvard/iq/dataverse/util/URLTokenUtil.java
+++ b/src/main/java/edu/harvard/iq/dataverse/util/URLTokenUtil.java
@@ -5,8 +5,8 @@
 import java.util.regex.Matcher;
 import java.util.regex.Pattern;
 
-import javax.json.Json;
-import javax.json.JsonValue;
+import jakarta.json.Json;
+import jakarta.json.JsonValue;
 
 import edu.harvard.iq.dataverse.DataFile;
 import edu.harvard.iq.dataverse.Dataset;
diff --git a/src/main/java/edu/harvard/iq/dataverse/util/WebloaderUtil.java b/src/main/java/edu/harvard/iq/dataverse/util/WebloaderUtil.java
index c2d9bf67236..acbdc6aa3c6 100644
--- a/src/main/java/edu/harvard/iq/dataverse/util/WebloaderUtil.java
+++ b/src/main/java/edu/harvard/iq/dataverse/util/WebloaderUtil.java
@@ -1,22 +1,12 @@
 package edu.harvard.iq.dataverse.util;
 
-import java.util.Date;
-import java.util.Enumeration;
-import java.util.HashMap;
-import java.util.Locale;
-import java.util.Map.Entry;
 import java.util.logging.Logger;
 
-import javax.servlet.http.HttpServletRequest;
-import javax.servlet.http.HttpSession;
+import jakarta.servlet.http.HttpServletRequest;
+import jakarta.servlet.http.HttpSession;
 
 import edu.harvard.iq.dataverse.Dataset;
-import edu.harvard.iq.dataverse.DatasetPage;
-import edu.harvard.iq.dataverse.authorization.AuthenticationServiceBean;
 import edu.harvard.iq.dataverse.authorization.users.ApiToken;
-import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser;
-import edu.harvard.iq.dataverse.authorization.users.User;
-import edu.harvard.iq.dataverse.settings.SettingsServiceBean;
 
 public class WebloaderUtil {
 
diff --git a/src/main/java/edu/harvard/iq/dataverse/util/bagit/BagGenerator.java b/src/main/java/edu/harvard/iq/dataverse/util/bagit/BagGenerator.java
index 8061b6e339e..baba1a0cb43 100644
--- a/src/main/java/edu/harvard/iq/dataverse/util/bagit/BagGenerator.java
+++ b/src/main/java/edu/harvard/iq/dataverse/util/bagit/BagGenerator.java
@@ -110,7 +110,7 @@ public class BagGenerator {
 
     private String apiKey = null;
 
-    private javax.json.JsonObject oremapObject;
+    private jakarta.json.JsonObject oremapObject;
     private JsonObject aggregation;
 
     private String dataciteXml;
diff --git a/src/main/java/edu/harvard/iq/dataverse/util/bagit/OREMap.java b/src/main/java/edu/harvard/iq/dataverse/util/bagit/OREMap.java
index c9e71296d0f..b3995b5957e 100644
--- a/src/main/java/edu/harvard/iq/dataverse/util/bagit/OREMap.java
+++ b/src/main/java/edu/harvard/iq/dataverse/util/bagit/OREMap.java
@@ -30,12 +30,12 @@
 import java.util.logging.Level;
 import java.util.logging.Logger;
 
-import javax.json.Json;
-import javax.json.JsonArray;
-import javax.json.JsonArrayBuilder;
-import javax.json.JsonObject;
-import javax.json.JsonObjectBuilder;
-import javax.json.JsonValue;
+import jakarta.json.Json;
+import jakarta.json.JsonArray;
+import jakarta.json.JsonArrayBuilder;
+import jakarta.json.JsonObject;
+import jakarta.json.JsonObjectBuilder;
+import jakarta.json.JsonValue;
 
 import org.apache.commons.lang3.exception.ExceptionUtils;
 
diff --git a/src/main/java/edu/harvard/iq/dataverse/util/bagit/OREMapHelper.java b/src/main/java/edu/harvard/iq/dataverse/util/bagit/OREMapHelper.java
index 6cd7f0928dc..4d63edac268 100644
--- a/src/main/java/edu/harvard/iq/dataverse/util/bagit/OREMapHelper.java
+++ b/src/main/java/edu/harvard/iq/dataverse/util/bagit/OREMapHelper.java
@@ -3,10 +3,10 @@
 import edu.harvard.iq.dataverse.DatasetFieldServiceBean;
 import edu.harvard.iq.dataverse.settings.SettingsServiceBean;
 
-import javax.annotation.PostConstruct;
-import javax.ejb.EJB;
-import javax.ejb.Singleton;
-import javax.ejb.Startup;
+import jakarta.annotation.PostConstruct;
+import jakarta.ejb.EJB;
+import jakarta.ejb.Singleton;
+import jakarta.ejb.Startup;
 
 /**
  * This is a small helper bean 
diff --git a/src/main/java/edu/harvard/iq/dataverse/util/file/BagItFileHandlerFactory.java b/src/main/java/edu/harvard/iq/dataverse/util/file/BagItFileHandlerFactory.java
index 53c80037223..4b0263030dc 100644
--- a/src/main/java/edu/harvard/iq/dataverse/util/file/BagItFileHandlerFactory.java
+++ b/src/main/java/edu/harvard/iq/dataverse/util/file/BagItFileHandlerFactory.java
@@ -7,10 +7,10 @@
 import edu.harvard.iq.dataverse.util.bagit.data.FileDataProviderFactory;
 import edu.harvard.iq.dataverse.util.bagit.data.FileUtilWrapper;
 
-import javax.annotation.PostConstruct;
-import javax.ejb.EJB;
-import javax.enterprise.context.SessionScoped;
-import javax.inject.Named;
+import jakarta.annotation.PostConstruct;
+import jakarta.ejb.EJB;
+import jakarta.enterprise.context.SessionScoped;
+import jakarta.inject.Named;
 import java.io.Serializable;
 import java.util.Optional;
 import java.util.logging.Logger;
diff --git a/src/main/java/edu/harvard/iq/dataverse/util/json/BriefJsonPrinter.java b/src/main/java/edu/harvard/iq/dataverse/util/json/BriefJsonPrinter.java
index ee0a882a10d..3fcaf6b11ff 100644
--- a/src/main/java/edu/harvard/iq/dataverse/util/json/BriefJsonPrinter.java
+++ b/src/main/java/edu/harvard/iq/dataverse/util/json/BriefJsonPrinter.java
@@ -1,11 +1,10 @@
 package edu.harvard.iq.dataverse.util.json;
 
 import edu.harvard.iq.dataverse.DatasetVersion;
-import edu.harvard.iq.dataverse.authorization.providers.builtin.BuiltinUser;
 import edu.harvard.iq.dataverse.MetadataBlock;
 import static edu.harvard.iq.dataverse.util.json.NullSafeJsonBuilder.jsonObjectBuilder;
 import edu.harvard.iq.dataverse.workflow.Workflow;
-import javax.json.JsonObjectBuilder;
+import jakarta.json.JsonObjectBuilder;
 
 /**
  * A Json printer that prints minimal data on objects. Useful when embedding 
diff --git a/src/main/java/edu/harvard/iq/dataverse/util/json/JSONLDUtil.java b/src/main/java/edu/harvard/iq/dataverse/util/json/JSONLDUtil.java
index 0fd9705031d..113a6128364 100644
--- a/src/main/java/edu/harvard/iq/dataverse/util/json/JSONLDUtil.java
+++ b/src/main/java/edu/harvard/iq/dataverse/util/json/JSONLDUtil.java
@@ -18,17 +18,17 @@
 import java.util.logging.Logger;
 
 
-import javax.json.Json;
-import javax.json.JsonArray;
-import javax.json.JsonObject;
-import javax.json.JsonObjectBuilder;
-import javax.json.JsonString;
-import javax.json.JsonValue;
-import javax.json.JsonWriter;
-import javax.json.JsonWriterFactory;
-import javax.json.JsonValue.ValueType;
-import javax.json.stream.JsonGenerator;
-import javax.ws.rs.BadRequestException;
+import jakarta.json.Json;
+import jakarta.json.JsonArray;
+import jakarta.json.JsonObject;
+import jakarta.json.JsonObjectBuilder;
+import jakarta.json.JsonString;
+import jakarta.json.JsonValue;
+import jakarta.json.JsonWriter;
+import jakarta.json.JsonWriterFactory;
+import jakarta.json.JsonValue.ValueType;
+import jakarta.json.stream.JsonGenerator;
+import jakarta.ws.rs.BadRequestException;
 
 import edu.harvard.iq.dataverse.ControlledVocabularyValue;
 import edu.harvard.iq.dataverse.Dataset;
diff --git a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonParser.java b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonParser.java
index 59290449988..febb785cd95 100644
--- a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonParser.java
+++ b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonParser.java
@@ -50,13 +50,13 @@
 import java.util.Set;
 import java.util.logging.Logger;
 import java.util.stream.Collectors;
-import javax.json.Json;
-import javax.json.JsonArray;
-import javax.json.JsonObject;
-import javax.json.JsonReader;
-import javax.json.JsonString;
-import javax.json.JsonValue;
-import javax.json.JsonValue.ValueType;
+import jakarta.json.Json;
+import jakarta.json.JsonArray;
+import jakarta.json.JsonObject;
+import jakarta.json.JsonReader;
+import jakarta.json.JsonString;
+import jakarta.json.JsonValue;
+import jakarta.json.JsonValue.ValueType;
 
 /**
  * Parses JSON objects into domain objects.
diff --git a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java
index 5080ae524c7..b6026998bb7 100644
--- a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java
+++ b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java
@@ -17,7 +17,6 @@
 import edu.harvard.iq.dataverse.DataverseContact;
 import edu.harvard.iq.dataverse.DataverseFacet;
 import edu.harvard.iq.dataverse.DataverseTheme;
-import edu.harvard.iq.dataverse.api.Datasets;
 import edu.harvard.iq.dataverse.authorization.DataverseRole;
 import edu.harvard.iq.dataverse.authorization.groups.impl.maildomain.MailDomainGroup;
 import edu.harvard.iq.dataverse.authorization.providers.builtin.BuiltinUser;
@@ -55,12 +54,10 @@
 import edu.harvard.iq.dataverse.workflow.step.WorkflowStepData;
 
 import java.util.*;
-import javax.json.Json;
-import javax.json.JsonArrayBuilder;
-import javax.json.JsonObjectBuilder;
-import javax.json.JsonValue;
-
-import org.apache.commons.collections4.CollectionUtils;
+import jakarta.json.Json;
+import jakarta.json.JsonArrayBuilder;
+import jakarta.json.JsonObjectBuilder;
+import jakarta.json.JsonValue;
 
 import java.util.function.BiConsumer;
 import java.util.function.BinaryOperator;
@@ -71,10 +68,10 @@
 import java.util.stream.Collectors;
 import static java.util.stream.Collectors.toList;
 
-import javax.ejb.EJB;
-import javax.ejb.Singleton;
-import javax.json.JsonArray;
-import javax.json.JsonObject;
+import jakarta.ejb.EJB;
+import jakarta.ejb.Singleton;
+import jakarta.json.JsonArray;
+import jakarta.json.JsonObject;
 
 /**
  * Convert objects to Json.
diff --git a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinterHelper.java b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinterHelper.java
index 1c7dce24680..55f9ecb5ce8 100644
--- a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinterHelper.java
+++ b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinterHelper.java
@@ -3,10 +3,10 @@
 import edu.harvard.iq.dataverse.DatasetFieldServiceBean;
 import edu.harvard.iq.dataverse.settings.SettingsServiceBean;
 
-import javax.annotation.PostConstruct;
-import javax.ejb.EJB;
-import javax.ejb.Singleton;
-import javax.ejb.Startup;
+import jakarta.annotation.PostConstruct;
+import jakarta.ejb.EJB;
+import jakarta.ejb.Singleton;
+import jakarta.ejb.Startup;
 
 /**
  * This is a small helper bean 
diff --git a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonUtil.java b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonUtil.java
index d02099eddb5..09d02854bab 100644
--- a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonUtil.java
+++ b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonUtil.java
@@ -9,11 +9,11 @@
 import java.util.HashMap;
 import java.util.Map;
 import java.util.logging.Logger;
-import javax.json.Json;
-import javax.json.JsonArray;
-import javax.json.JsonWriter;
-import javax.json.JsonWriterFactory;
-import javax.json.stream.JsonGenerator;
+import jakarta.json.Json;
+import jakarta.json.JsonArray;
+import jakarta.json.JsonWriter;
+import jakarta.json.JsonWriterFactory;
+import jakarta.json.stream.JsonGenerator;
 
 public class JsonUtil {
 
@@ -47,7 +47,7 @@ public static String prettyPrint(JsonArray jsonArray) {
         return stringWriter.toString();
     }
 
-    public static String prettyPrint(javax.json.JsonObject jsonObject) {
+    public static String prettyPrint(jakarta.json.JsonObject jsonObject) {
         Map<String, Boolean> config = new HashMap<>();
         config.put(JsonGenerator.PRETTY_PRINTING, true);
         JsonWriterFactory jsonWriterFactory = Json.createWriterFactory(config);
@@ -58,13 +58,13 @@ public static String prettyPrint(javax.json.JsonObject jsonObject) {
         return stringWriter.toString();
     }
     
-    public static javax.json.JsonObject getJsonObject(String serializedJson) {
+    public static jakarta.json.JsonObject getJsonObject(String serializedJson) {
         try (StringReader rdr = new StringReader(serializedJson)) {
             return Json.createReader(rdr).readObject();
         }
     }
     
-    public static javax.json.JsonArray getJsonArray(String serializedJson) {
+    public static jakarta.json.JsonArray getJsonArray(String serializedJson) {
         try (StringReader rdr = new StringReader(serializedJson)) {
             return Json.createReader(rdr).readArray();
         }
diff --git a/src/main/java/edu/harvard/iq/dataverse/util/json/NullSafeJsonBuilder.java b/src/main/java/edu/harvard/iq/dataverse/util/json/NullSafeJsonBuilder.java
index 59a23a43452..ef8ab39122f 100644
--- a/src/main/java/edu/harvard/iq/dataverse/util/json/NullSafeJsonBuilder.java
+++ b/src/main/java/edu/harvard/iq/dataverse/util/json/NullSafeJsonBuilder.java
@@ -5,11 +5,11 @@
 import java.math.BigDecimal;
 import java.math.BigInteger;
 import java.sql.Timestamp;
-import javax.json.Json;
-import javax.json.JsonArrayBuilder;
-import javax.json.JsonObject;
-import javax.json.JsonObjectBuilder;
-import javax.json.JsonValue;
+import jakarta.json.Json;
+import jakarta.json.JsonArrayBuilder;
+import jakarta.json.JsonObject;
+import jakarta.json.JsonObjectBuilder;
+import jakarta.json.JsonValue;
 
 /**
  * A JSON builder that drops any null values. If we didn't drop'em,
diff --git a/src/main/java/edu/harvard/iq/dataverse/validation/EMailValidator.java b/src/main/java/edu/harvard/iq/dataverse/validation/EMailValidator.java
index 5050aad5bf7..624e49623f2 100644
--- a/src/main/java/edu/harvard/iq/dataverse/validation/EMailValidator.java
+++ b/src/main/java/edu/harvard/iq/dataverse/validation/EMailValidator.java
@@ -1,7 +1,7 @@
 package edu.harvard.iq.dataverse.validation;
 
-import javax.validation.ConstraintValidator;
-import javax.validation.ConstraintValidatorContext;
+import jakarta.validation.ConstraintValidator;
+import jakarta.validation.ConstraintValidatorContext;
 
 import org.apache.commons.validator.routines.EmailValidator;
 
diff --git a/src/main/java/edu/harvard/iq/dataverse/validation/PasswordValidatorServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/validation/PasswordValidatorServiceBean.java
index c32e6728358..41e7f1b8b22 100644
--- a/src/main/java/edu/harvard/iq/dataverse/validation/PasswordValidatorServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/validation/PasswordValidatorServiceBean.java
@@ -18,9 +18,9 @@
 import java.util.logging.Logger;
 import java.util.regex.Pattern;
 import java.util.stream.Collectors;
-import javax.ejb.EJB;
-import javax.ejb.Stateless;
-import javax.inject.Named;
+import jakarta.ejb.EJB;
+import jakarta.ejb.Stateless;
+import jakarta.inject.Named;
 import org.passay.CharacterCharacteristicsRule;
 import org.passay.CharacterRule;
 import org.passay.DictionaryRule;
diff --git a/src/main/java/edu/harvard/iq/dataverse/validation/URLValidator.java b/src/main/java/edu/harvard/iq/dataverse/validation/URLValidator.java
index 846ae48783a..285f34d3f8c 100644
--- a/src/main/java/edu/harvard/iq/dataverse/validation/URLValidator.java
+++ b/src/main/java/edu/harvard/iq/dataverse/validation/URLValidator.java
@@ -1,7 +1,6 @@
 package edu.harvard.iq.dataverse.validation;
-import edu.harvard.iq.dataverse.util.BundleUtil;
-import javax.validation.ConstraintValidator;
-import javax.validation.ConstraintValidatorContext;
+import jakarta.validation.ConstraintValidator;
+import jakarta.validation.ConstraintValidatorContext;
 import org.apache.commons.validator.routines.UrlValidator;
 
 /**
diff --git a/src/main/java/edu/harvard/iq/dataverse/validation/ValidateEmail.java b/src/main/java/edu/harvard/iq/dataverse/validation/ValidateEmail.java
index 310dc950858..6ec677bd7a8 100644
--- a/src/main/java/edu/harvard/iq/dataverse/validation/ValidateEmail.java
+++ b/src/main/java/edu/harvard/iq/dataverse/validation/ValidateEmail.java
@@ -11,8 +11,8 @@
 import java.lang.annotation.Retention;
 import java.lang.annotation.Target;
 
-import javax.validation.Constraint;
-import javax.validation.Payload;
+import jakarta.validation.Constraint;
+import jakarta.validation.Payload;
 /**
  *
  * @author skraffmi
diff --git a/src/main/java/edu/harvard/iq/dataverse/validation/ValidateURL.java b/src/main/java/edu/harvard/iq/dataverse/validation/ValidateURL.java
index 5aaab0c2e8e..3834b119598 100644
--- a/src/main/java/edu/harvard/iq/dataverse/validation/ValidateURL.java
+++ b/src/main/java/edu/harvard/iq/dataverse/validation/ValidateURL.java
@@ -6,8 +6,8 @@
 import java.lang.annotation.Retention;
 import static java.lang.annotation.RetentionPolicy.RUNTIME;
 import java.lang.annotation.Target;
-import javax.validation.Constraint;
-import javax.validation.Payload;
+import jakarta.validation.Constraint;
+import jakarta.validation.Payload;
 
 @Target({FIELD})
 @Retention(RUNTIME)
diff --git a/src/main/java/edu/harvard/iq/dataverse/validation/ValidateUserName.java b/src/main/java/edu/harvard/iq/dataverse/validation/ValidateUserName.java
index 0583b70df49..6307edd073a 100644
--- a/src/main/java/edu/harvard/iq/dataverse/validation/ValidateUserName.java
+++ b/src/main/java/edu/harvard/iq/dataverse/validation/ValidateUserName.java
@@ -10,11 +10,11 @@
 import java.lang.annotation.Retention;
 import static java.lang.annotation.RetentionPolicy.RUNTIME;
 import java.lang.annotation.Target;
-import javax.validation.Constraint;
-import javax.validation.Payload;
-import javax.validation.constraints.NotBlank;
-import javax.validation.constraints.Size;
-import javax.validation.constraints.Pattern;
+import jakarta.validation.Constraint;
+import jakarta.validation.Payload;
+import jakarta.validation.constraints.NotBlank;
+import jakarta.validation.constraints.Size;
+import jakarta.validation.constraints.Pattern;
 
 /**
  *
diff --git a/src/main/java/edu/harvard/iq/dataverse/workflow/PendingWorkflowInvocation.java b/src/main/java/edu/harvard/iq/dataverse/workflow/PendingWorkflowInvocation.java
index 577e0f756de..94fefa9bc13 100644
--- a/src/main/java/edu/harvard/iq/dataverse/workflow/PendingWorkflowInvocation.java
+++ b/src/main/java/edu/harvard/iq/dataverse/workflow/PendingWorkflowInvocation.java
@@ -9,14 +9,14 @@
 import java.io.Serializable;
 import java.util.HashMap;
 import java.util.Map;
-import javax.persistence.ElementCollection;
-import javax.persistence.Entity;
-import javax.persistence.FetchType;
-import javax.persistence.Id;
-import javax.persistence.ManyToOne;
-import javax.persistence.NamedQueries;
-import javax.persistence.NamedQuery;
-import javax.persistence.OneToOne;
+import jakarta.persistence.ElementCollection;
+import jakarta.persistence.Entity;
+import jakarta.persistence.FetchType;
+import jakarta.persistence.Id;
+import jakarta.persistence.ManyToOne;
+import jakarta.persistence.NamedQueries;
+import jakarta.persistence.NamedQuery;
+import jakarta.persistence.OneToOne;
 
 /**
  * A workflow whose current step waits for an external system to complete a
diff --git a/src/main/java/edu/harvard/iq/dataverse/workflow/Workflow.java b/src/main/java/edu/harvard/iq/dataverse/workflow/Workflow.java
index 6c73ed0e64b..bd32c517bc6 100644
--- a/src/main/java/edu/harvard/iq/dataverse/workflow/Workflow.java
+++ b/src/main/java/edu/harvard/iq/dataverse/workflow/Workflow.java
@@ -6,16 +6,16 @@
 import java.util.List;
 import java.util.Map;
 import java.util.Objects;
-import javax.persistence.CascadeType;
-import javax.persistence.Entity;
-import javax.persistence.FetchType;
-import javax.persistence.GeneratedValue;
-import javax.persistence.GenerationType;
-import javax.persistence.Id;
-import javax.persistence.NamedQueries;
-import javax.persistence.NamedQuery;
-import javax.persistence.OneToMany;
-import javax.persistence.OrderColumn;
+import jakarta.persistence.CascadeType;
+import jakarta.persistence.Entity;
+import jakarta.persistence.FetchType;
+import jakarta.persistence.GeneratedValue;
+import jakarta.persistence.GenerationType;
+import jakarta.persistence.Id;
+import jakarta.persistence.NamedQueries;
+import jakarta.persistence.NamedQuery;
+import jakarta.persistence.OneToMany;
+import jakarta.persistence.OrderColumn;
 
 /**
  * A list of steps that can be executed with a given context. 
diff --git a/src/main/java/edu/harvard/iq/dataverse/workflow/WorkflowServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/workflow/WorkflowServiceBean.java
index cf78c4f8cdf..47f24c9b8bd 100644
--- a/src/main/java/edu/harvard/iq/dataverse/workflow/WorkflowServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/workflow/WorkflowServiceBean.java
@@ -6,7 +6,6 @@
 import edu.harvard.iq.dataverse.EjbDataverseEngine;
 import edu.harvard.iq.dataverse.RoleAssigneeServiceBean;
 import edu.harvard.iq.dataverse.UserNotification;
-import edu.harvard.iq.dataverse.UserNotification.Type;
 import edu.harvard.iq.dataverse.UserNotificationServiceBean;
 import edu.harvard.iq.dataverse.authorization.users.ApiToken;
 import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser;
@@ -33,15 +32,15 @@
 import java.util.Optional;
 import java.util.logging.Level;
 import java.util.logging.Logger;
-import javax.ejb.Asynchronous;
-import javax.ejb.EJB;
-import javax.ejb.Stateless;
-import javax.ejb.TransactionAttribute;
-import javax.ejb.TransactionAttributeType;
-import javax.inject.Inject;
-import javax.persistence.EntityManager;
-import javax.persistence.PersistenceContext;
-import javax.persistence.TypedQuery;
+import jakarta.ejb.Asynchronous;
+import jakarta.ejb.EJB;
+import jakarta.ejb.Stateless;
+import jakarta.ejb.TransactionAttribute;
+import jakarta.ejb.TransactionAttributeType;
+import jakarta.inject.Inject;
+import jakarta.persistence.EntityManager;
+import jakarta.persistence.PersistenceContext;
+import jakarta.persistence.TypedQuery;
 
 /**
  * Service bean for managing and executing {@link Workflow}s
diff --git a/src/main/java/edu/harvard/iq/dataverse/workflow/internalspi/ArchivalSubmissionWorkflowStep.java b/src/main/java/edu/harvard/iq/dataverse/workflow/internalspi/ArchivalSubmissionWorkflowStep.java
index 105af6a00d8..b0567bff107 100644
--- a/src/main/java/edu/harvard/iq/dataverse/workflow/internalspi/ArchivalSubmissionWorkflowStep.java
+++ b/src/main/java/edu/harvard/iq/dataverse/workflow/internalspi/ArchivalSubmissionWorkflowStep.java
@@ -1,6 +1,5 @@
 package edu.harvard.iq.dataverse.workflow.internalspi;
 
-import edu.harvard.iq.dataverse.DatasetVersion;
 import edu.harvard.iq.dataverse.engine.command.DataverseRequest;
 import edu.harvard.iq.dataverse.engine.command.impl.AbstractSubmitToArchiveCommand;
 import edu.harvard.iq.dataverse.settings.SettingsServiceBean;
@@ -10,13 +9,12 @@
 import edu.harvard.iq.dataverse.workflow.step.WorkflowStep;
 import edu.harvard.iq.dataverse.workflow.step.WorkflowStepResult;
 
-import java.lang.reflect.Constructor;
 import java.util.HashMap;
 import java.util.Map;
 import java.util.logging.Level;
 import java.util.logging.Logger;
 
-import javax.servlet.http.HttpServletRequest;
+import jakarta.servlet.http.HttpServletRequest;
 
 /**
  * A step that submits a BagIT bag of the newly published dataset version via a
diff --git a/src/main/java/edu/harvard/iq/dataverse/workflow/internalspi/AuthorizedExternalStep.java b/src/main/java/edu/harvard/iq/dataverse/workflow/internalspi/AuthorizedExternalStep.java
index bbe200aaeb3..ee770d4057e 100644
--- a/src/main/java/edu/harvard/iq/dataverse/workflow/internalspi/AuthorizedExternalStep.java
+++ b/src/main/java/edu/harvard/iq/dataverse/workflow/internalspi/AuthorizedExternalStep.java
@@ -6,23 +6,18 @@
 import edu.harvard.iq.dataverse.workflow.WorkflowContext.TriggerType;
 import edu.harvard.iq.dataverse.workflow.step.Failure;
 import edu.harvard.iq.dataverse.workflow.step.Pending;
-import edu.harvard.iq.dataverse.workflow.step.Success;
 import edu.harvard.iq.dataverse.workflow.step.WorkflowStep;
 import edu.harvard.iq.dataverse.workflow.step.WorkflowStepResult;
 import edu.harvard.iq.dataverse.workflows.WorkflowUtil;
 
-import static edu.harvard.iq.dataverse.workflow.step.WorkflowStepResult.OK;
-
-import java.io.StringReader;
 import java.nio.charset.StandardCharsets;
 import java.util.HashMap;
 import java.util.Map;
 import java.util.logging.Level;
 import java.util.logging.Logger;
-import java.util.regex.Pattern;
 
-import javax.json.Json;
-import javax.json.JsonObject;
+import jakarta.json.Json;
+import jakarta.json.JsonObject;
 
 import org.apache.commons.httpclient.HttpClient;
 import org.apache.commons.httpclient.HttpMethodBase;
diff --git a/src/main/java/edu/harvard/iq/dataverse/workflow/internalspi/LDNAnnounceDatasetVersionStep.java b/src/main/java/edu/harvard/iq/dataverse/workflow/internalspi/LDNAnnounceDatasetVersionStep.java
index 13024f9f68f..124eea801d9 100644
--- a/src/main/java/edu/harvard/iq/dataverse/workflow/internalspi/LDNAnnounceDatasetVersionStep.java
+++ b/src/main/java/edu/harvard/iq/dataverse/workflow/internalspi/LDNAnnounceDatasetVersionStep.java
@@ -27,12 +27,12 @@
 import java.util.UUID;
 import java.util.logging.Level;
 import java.util.logging.Logger;
-import javax.json.Json;
-import javax.json.JsonArray;
-import javax.json.JsonArrayBuilder;
-import javax.json.JsonObject;
-import javax.json.JsonObjectBuilder;
-import javax.json.JsonValue;
+import jakarta.json.Json;
+import jakarta.json.JsonArray;
+import jakarta.json.JsonArrayBuilder;
+import jakarta.json.JsonObject;
+import jakarta.json.JsonObjectBuilder;
+import jakarta.json.JsonValue;
 
 import org.apache.http.client.methods.CloseableHttpResponse;
 import org.apache.http.client.methods.HttpPost;
diff --git a/src/main/java/edu/harvard/iq/dataverse/workflow/step/WorkflowStepData.java b/src/main/java/edu/harvard/iq/dataverse/workflow/step/WorkflowStepData.java
index a06531a2666..07bcf247533 100644
--- a/src/main/java/edu/harvard/iq/dataverse/workflow/step/WorkflowStepData.java
+++ b/src/main/java/edu/harvard/iq/dataverse/workflow/step/WorkflowStepData.java
@@ -3,14 +3,14 @@
 import edu.harvard.iq.dataverse.workflow.Workflow;
 import java.io.Serializable;
 import java.util.Map;
-import javax.persistence.Column;
-import javax.persistence.ElementCollection;
-import javax.persistence.Entity;
-import javax.persistence.FetchType;
-import javax.persistence.GeneratedValue;
-import javax.persistence.GenerationType;
-import javax.persistence.Id;
-import javax.persistence.ManyToOne;
+import jakarta.persistence.Column;
+import jakarta.persistence.ElementCollection;
+import jakarta.persistence.Entity;
+import jakarta.persistence.FetchType;
+import jakarta.persistence.GeneratedValue;
+import jakarta.persistence.GenerationType;
+import jakarta.persistence.Id;
+import jakarta.persistence.ManyToOne;
 
 /**
  * A database row describing a step in a workflow. Actual steps can be instantiated
diff --git a/src/main/java/edu/harvard/iq/dataverse/workflows/WorkflowComment.java b/src/main/java/edu/harvard/iq/dataverse/workflows/WorkflowComment.java
index d03afcaa91a..7cfa226d7ba 100644
--- a/src/main/java/edu/harvard/iq/dataverse/workflows/WorkflowComment.java
+++ b/src/main/java/edu/harvard/iq/dataverse/workflows/WorkflowComment.java
@@ -5,14 +5,14 @@
 import java.io.Serializable;
 import java.sql.Timestamp;
 import java.util.Date;
-import javax.persistence.Column;
-import javax.persistence.Entity;
-import javax.persistence.EnumType;
-import javax.persistence.Enumerated;
-import javax.persistence.GeneratedValue;
-import javax.persistence.GenerationType;
-import javax.persistence.Id;
-import javax.persistence.JoinColumn;
+import jakarta.persistence.Column;
+import jakarta.persistence.Entity;
+import jakarta.persistence.EnumType;
+import jakarta.persistence.Enumerated;
+import jakarta.persistence.GeneratedValue;
+import jakarta.persistence.GenerationType;
+import jakarta.persistence.Id;
+import jakarta.persistence.JoinColumn;
 
 @Entity
 public class WorkflowComment implements Serializable {
diff --git a/src/main/java/edu/harvard/iq/dataverse/workflows/WorkflowUtil.java b/src/main/java/edu/harvard/iq/dataverse/workflows/WorkflowUtil.java
index e6e6bfd23c8..456b829ba61 100644
--- a/src/main/java/edu/harvard/iq/dataverse/workflows/WorkflowUtil.java
+++ b/src/main/java/edu/harvard/iq/dataverse/workflows/WorkflowUtil.java
@@ -8,12 +8,11 @@
 import java.util.logging.Level;
 import java.util.logging.Logger;
 
-import javax.json.Json;
-import javax.json.JsonArrayBuilder;
-import javax.json.JsonObject;
+import jakarta.json.Json;
+import jakarta.json.JsonArrayBuilder;
+import jakarta.json.JsonObject;
 
 import edu.harvard.iq.dataverse.util.json.NullSafeJsonBuilder;
-import edu.harvard.iq.dataverse.workflow.internalspi.PauseWithMessageStep;
 import edu.harvard.iq.dataverse.workflow.step.Failure;
 import edu.harvard.iq.dataverse.workflow.step.Success;
 import edu.harvard.iq.dataverse.workflow.step.WorkflowStepResult;
diff --git a/src/main/resources/META-INF/batch-jobs/FileSystemImportJob.xml b/src/main/resources/META-INF/batch-jobs/FileSystemImportJob.xml
index 167fbdbec5d..0294f15e967 100644
--- a/src/main/resources/META-INF/batch-jobs/FileSystemImportJob.xml
+++ b/src/main/resources/META-INF/batch-jobs/FileSystemImportJob.xml
@@ -34,14 +34,14 @@
             </writer>
             <skippable-exception-classes>
                 <!-- To skip all the exceptions -->
-                <include class="javax.transaction.RollbackException"/>
+                <include class="jakarta.transaction.RollbackException"/>
                 <include class="java.lang.NullPointerException"/>
                 <include class="java.lang.Exception"/>
                 <include class="java.lang.Throwable"/>
             </skippable-exception-classes>
             <no-rollback-exception-classes>
                 <!-- To skip all the exceptions -->
-                <include class="javax.transaction.RollbackException"/>
+                <include class="jakarta.transaction.RollbackException"/>
                 <include class="java.lang.NullPointerException"/>
                 <include class="java.lang.Exception"/>
                 <include class="java.lang.Throwable"/>
diff --git a/src/main/resources/META-INF/persistence.xml b/src/main/resources/META-INF/persistence.xml
index 45552f36939..e6224dcdf01 100644
--- a/src/main/resources/META-INF/persistence.xml
+++ b/src/main/resources/META-INF/persistence.xml
@@ -1,11 +1,11 @@
 <?xml version="1.0" encoding="UTF-8"?>
-<persistence version="1.0" xmlns="http://java.sun.com/xml/ns/persistence" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://java.sun.com/xml/ns/persistence http://java.sun.com/xml/ns/persistence/persistence_1_0.xsd">
+<persistence version="3.0" xmlns="https://jakarta.ee/xml/ns/persistence"
+             xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+             xsi:schemaLocation="https://jakarta.ee/xml/ns/persistence https://jakarta.ee/xml/ns/persistence/persistence_3_0.xsd">
     <persistence-unit name="VDCNet-ejbPU" transaction-type="JTA">
-        <!-- provider>oracle.toplink.essentials.ejb.cmp3.EntityManagerFactoryProvider</provider-->
         <provider>org.eclipse.persistence.jpa.PersistenceProvider</provider>
         <jta-data-source>java:app/jdbc/dataverse</jta-data-source>
         <properties>
-            <!--property name="toplink.logging.level" value="FINE"/-->
 	    <!-- disabling weaving, as an experiment: - L.A. -->
 	    <property name="eclipselink.weaving" value="false"/>
 	    <!-- comment out the line below to make the app NOT build 
diff --git a/src/main/webapp/404static.xhtml b/src/main/webapp/404static.xhtml
new file mode 100644
index 00000000000..69ff17ebc0f
--- /dev/null
+++ b/src/main/webapp/404static.xhtml
@@ -0,0 +1,109 @@
+<?xml version='1.0' encoding='UTF-8' ?>
+<!DOCTYPE html>
+<html xmlns="http://www.w3.org/1999/xhtml" xmlns:h="http://java.sun.com/jsf/html" lang="en">
+    <head>
+        <title>#{bundle['error.404.page.title']}</title>
+        <meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
+        <meta http-equiv="Content-Language" content="en" />
+        <meta name="viewport" content="width=device-width, initial-scale=1" />
+        <meta http-equiv="X-UA-Compatible" content="IE=edge" />
+        <meta name="description" content="Share, archive, and get credit for your data. Find and cite data across all research fields." />
+        
+        <link type="image/png" rel="icon" href="https://cdn.rawgit.com/IQSS/dataverse/src/main/webapp/resources/images/favicondataverse.png.xhtml" />
+
+        <link type="image/png" rel="image_src" href="https://cdn.rawgit.com/IQSS/dataverse/src/main/webapp/resources/images/dataverseproject.png.xhtml" />
+        
+        <link rel="stylesheet" href="https://stackpath.bootstrapcdn.com/bootstrap/3.4.1/css/bootstrap.min.css" />
+
+        <link rel="stylesheet" href="https://stackpath.bootstrapcdn.com/bootstrap/3.4.1/css/bootstrap-theme.min.css" />
+
+        <link rel="stylesheet" href="https://cdn.rawgit.com/IQSS/dataverse/develop/src/main/webapp/resources/css/structure.css" />
+
+        <style type="text/css">
+            /* FontCustom CSS */
+            @font-face {
+              font-family: "fontcustom";
+              src: url("https://cdn.rawgit.com/IQSS/dataverse/src/main/webapp/resources/fontcustom/fontcustom_0cdeefae934823416d24b6c2132ac702.eot");
+              src: url("https://cdn.rawgit.com/IQSS/dataverse/src/main/webapp/resources/fontcustom/fontcustom_0cdeefae934823416d24b6c2132ac702.eot?#iefix") format("embedded-opentype"),
+                   url("https://cdn.rawgit.com/IQSS/dataverse/src/main/webapp/resources/fontcustom/fontcustom_0cdeefae934823416d24b6c2132ac702.woff") format("woff"),
+                   url("https://cdn.rawgit.com/IQSS/dataverse/src/main/webapp/resources/fontcustom/fontcustom_0cdeefae934823416d24b6c2132ac702.ttf") format("truetype"),
+                   url("https://cdn.rawgit.com/IQSS/dataverse/src/main/webapp/resources/fontcustom/fontcustom_0cdeefae934823416d24b6c2132ac702.svg#fontcustom") format("svg");
+              font-weight: normal;
+              font-style: normal;
+            }
+
+            @media screen and (-webkit-min-device-pixel-ratio:0) {
+              @font-face {
+                font-family: "fontcustom";
+                src: url("https://cdn.rawgit.com/IQSS/dataverse/src/main/webapp/resources/fontcustom/fontcustom_0cdeefae934823416d24b6c2132ac702.svg#fontcustom") format("svg");
+              }
+            }
+            
+            [data-icon]:before { content: attr(data-icon); }
+
+            [data-icon]:before,
+            .icon-dataverse:before {
+              display: inline-block;
+              font-family: "fontcustom";
+              font-style: normal;
+              font-weight: normal;
+              font-variant: normal;
+              line-height: 1;
+              text-decoration: inherit;
+              text-rendering: optimizeLegibility;
+              text-transform: none;
+              -moz-osx-font-smoothing: grayscale;
+              -webkit-font-smoothing: antialiased;
+              font-smoothing: antialiased;
+            }
+
+            .icon-dataverse:before { content: "\f100"; }
+
+            /* Custom CSS */
+            #navbarFixed div.navbar-header img.navbar-brand.custom-logo {height:50px !important;}
+            #dataverseDesc span > span > span > h3 {font-weight: 300 !important;}
+
+            nav.navbar.navbar-default {background: #ececec !important;}
+        </style>
+    </head>
+    <body>
+        <div id="dataverse-header-block">
+            <!-- Navbar Panel -->
+            <nav id="navbarFixed" class="navbar navbar-default navbar-fixed-top" role="navigation">
+                <div class="container">
+                    <div class="navbar-header">
+                        <h:outputFormat class="navbar-brand custom-logo" value="#{bundle['footer.dataverseProject']}"/>
+                    </div>
+                    <div class="collapse navbar-collapse" id="topNavBar">
+                        <ul class="nav navbar-nav navbar-right">
+                            <li>
+                                &#160;
+                            </li>
+                        </ul>
+                    </div>
+                </div>
+            </nav>
+        </div>
+        <div class="container" id="content">
+            <div class="alert alert-danger" role="alert" style="margin-top:3em;">
+                <h:outputFormat value="#{bundle['error.404.message']}" escape="false" />
+            </div>
+        </div>
+        <div id="footer">
+            <div class="container">
+                <div class="row">
+                    <div class="col-sm-8 small">
+                        <p>Copyright &#169; 2023, The President &#38; Fellows of Harvard College | <a href="http://best-practices.dataverse.org/harvard-policies/harvard-privacy-policy.html" target="_blank">Privacy Policy</a>
+                        </p>
+                    </div>
+                    <div class="col-sm-4 text-right">
+                        <div class="poweredbylogo">
+                            <span>Powered by</span> 
+                            <a href="http://dataverse.org/" title="The Dataverse Project" target="_blank"><img src="https://cdn.rawgit.com/IQSS/dataverse/develop/src/main/webapp/resources/images/dataverseproject_logo.png" alt="The Dataverse Project" /></a>
+                        </div>
+                    </div>
+                </div>
+            </div>
+        </div>
+    </body>
+</html>
diff --git a/src/main/webapp/WEB-INF/beans.xml b/src/main/webapp/WEB-INF/beans.xml
index 4ca8195bea5..f6c5b8cbbc4 100644
--- a/src/main/webapp/WEB-INF/beans.xml
+++ b/src/main/webapp/WEB-INF/beans.xml
@@ -1,5 +1,7 @@
 <?xml version="1.0" encoding="UTF-8"?>
-<beans xmlns="http://java.sun.com/xml/ns/javaee"
+<beans xmlns="https://jakarta.ee/xml/ns/jakartaee"
        xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
-       xsi:schemaLocation="http://java.sun.com/xml/ns/javaee http://java.sun.com/xml/ns/javaee/beans_1_0.xsd">
+       xsi:schemaLocation="https://jakarta.ee/xml/ns/jakartaee https://jakarta.ee/xml/ns/jakartaee/beans_4_0.xsd"
+       bean-discovery-mode="all">
+    <!-- 2023-06: Note that if you change bean-discovery-mode from "all" to "allocated", SWORD APIs do not work. -->
 </beans>
diff --git a/src/main/webapp/WEB-INF/faces-config.xml b/src/main/webapp/WEB-INF/faces-config.xml
index 2015ca55f5f..6eeb5a65baf 100644
--- a/src/main/webapp/WEB-INF/faces-config.xml
+++ b/src/main/webapp/WEB-INF/faces-config.xml
@@ -1,8 +1,7 @@
-<faces-config xmlns="http://xmlns.jcp.org/xml/ns/javaee"
+<faces-config xmlns="https://jakarta.ee/xml/ns/jakartaee"
               xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
-              xsi:schemaLocation="http://xmlns.jcp.org/xml/ns/javaee
-        http://xmlns.jcp.org/xml/ns/javaee/web-facesconfig_2_2.xsd"
-              version="2.2">
+              xsi:schemaLocation="https://jakarta.ee/xml/ns/jakartaee https://jakarta.ee/xml/ns/jakartaee/web-facesconfig_4_0.xsd"
+              version="4.0">
     <application>
         <resource-bundle>
             <base-name>edu.harvard.iq.dataverse.util.LocalBundle</base-name>
diff --git a/src/main/webapp/WEB-INF/web.xml b/src/main/webapp/WEB-INF/web.xml
index 8179ca970d5..427615f2f0b 100644
--- a/src/main/webapp/WEB-INF/web.xml
+++ b/src/main/webapp/WEB-INF/web.xml
@@ -1,10 +1,13 @@
 <?xml version="1.0" encoding="UTF-8"?>
-<web-app version="3.0" xmlns="http://java.sun.com/xml/ns/javaee" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://java.sun.com/xml/ns/javaee  http://java.sun.com/xml/ns/javaee/web-app_3_0.xsd">
+<web-app xmlns="https://jakarta.ee/xml/ns/jakartaee"
+         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xsi:schemaLocation="https://jakarta.ee/xml/ns/jakartaee https://jakarta.ee/xml/ns/jakartaee/web-app_6_0.xsd"
+         version="6.0">
     <display-name>Dataverse</display-name>
     <!-- Error page -->
     <error-page>
         <error-code>404</error-code>
-        <location>/404.xhtml</location>
+        <location>/404static.xhtml</location>
     </error-page>
     <error-page>
         <error-code>500</error-code>
@@ -27,7 +30,7 @@
         See also dev guide: https://guides.dataverse.org/en/latest/developers/debugging.html
      -->
     <context-param>
-        <param-name>javax.faces.PROJECT_STAGE</param-name>
+        <param-name>jakarta.faces.PROJECT_STAGE</param-name>
         <!-- Uses Microprofile Config to replace at runtime. Not standardized, Payara App Server specific. -->
         <param-value>${MPCONFIG=dataverse.jsf.project-stage:Production}</param-value>
     </context-param>
@@ -43,23 +46,23 @@
     <!-- /context-param -->
     <context-param>
         <param-name>
-            javax.faces.INTERPRET_EMPTY_STRING_SUBMITTED_VALUES_AS_NULL
+            jakarta.faces.INTERPRET_EMPTY_STRING_SUBMITTED_VALUES_AS_NULL
         </param-name>
         <!-- Uses Microprofile Config to replace at runtime. Not standardized, Payara App Server specific. -->
         <param-value>${MPCONFIG=dataverse.jsf.empty-string-null:true}</param-value>
     </context-param>
     <context-param>
-        <param-name>javax.faces.FACELETS_SKIP_COMMENTS</param-name>
+        <param-name>jakarta.faces.FACELETS_SKIP_COMMENTS</param-name>
         <!-- Uses Microprofile Config to replace at runtime. Not standardized, Payara App Server specific. -->
         <param-value>${MPCONFIG=dataverse.jsf.skip-comments:true}</param-value>
     </context-param>
     <context-param>
-        <param-name>javax.faces.FACELETS_BUFFER_SIZE</param-name>
+        <param-name>jakarta.faces.FACELETS_BUFFER_SIZE</param-name>
         <!-- Uses Microprofile Config to replace at runtime. Not standardized, Payara App Server specific. -->
         <param-value>${MPCONFIG=dataverse.jsf.buffer-size:102400}</param-value>
     </context-param>
     <context-param>
-        <param-name>javax.faces.FACELETS_REFRESH_PERIOD</param-name>
+        <param-name>jakarta.faces.FACELETS_REFRESH_PERIOD</param-name>
         <!-- Uses Microprofile Config to replace at runtime. Not standardized, Payara App Server specific. -->
         <param-value>${MPCONFIG=dataverse.jsf.refresh-period:-1}</param-value>
     </context-param>
@@ -86,14 +89,9 @@
     </filter-mapping>
     <servlet>
         <servlet-name>Faces Servlet</servlet-name>
-        <servlet-class>javax.faces.webapp.FacesServlet</servlet-class>
+        <servlet-class>jakarta.faces.webapp.FacesServlet</servlet-class>
         <load-on-startup>1</load-on-startup>
     </servlet>
-    <servlet>
-        <servlet-name>Push Servlet</servlet-name>
-        <servlet-class>org.primefaces.push.PushServlet</servlet-class>
-        <async-supported>true</async-supported>
-    </servlet>
     <!-- Map these files with JSF -->
     <servlet>
         <servlet-name>OAIServlet</servlet-name>
@@ -128,10 +126,6 @@
         <servlet-name>Faces Servlet</servlet-name>
         <url-pattern>*.xhtml</url-pattern>
     </servlet-mapping>
-    <servlet-mapping>
-        <servlet-name>Push Servlet</servlet-name>
-        <url-pattern>/primepush/*</url-pattern>
-    </servlet-mapping>
     <servlet-mapping>
         <servlet-name>OAIServlet</servlet-name>
         <url-pattern>/oai</url-pattern>
@@ -274,5 +268,13 @@
         <servlet-name>edu.harvard.iq.dataverse.api.datadeposit.SWORDv2ContainerServlet</servlet-name>
         <url-pattern>/dvn/api/data-deposit/v1.1/swordv2/edit/*</url-pattern>
     </servlet-mapping>
+    <filter>
+        <filter-name>edu.harvard.iq.dataverse.api.datadeposit.SwordFilter</filter-name>
+        <filter-class>edu.harvard.iq.dataverse.api.datadeposit.SwordFilter</filter-class>
+    </filter>
+    <filter-mapping>
+        <filter-name>edu.harvard.iq.dataverse.api.datadeposit.SwordFilter</filter-name>
+        <url-pattern>/dvn/api/data-deposit/v1.1/swordv2/edit-media/*</url-pattern>
+    </filter-mapping>
     <!-- END Data Deposit API (SWORD v2) -->
 </web-app>
diff --git a/src/main/webapp/dataset-citation.xhtml b/src/main/webapp/dataset-citation.xhtml
index 9baced25be0..b42dd5e563f 100644
--- a/src/main/webapp/dataset-citation.xhtml
+++ b/src/main/webapp/dataset-citation.xhtml
@@ -33,19 +33,13 @@
                     </button>
                     <ul class="dropdown-menu">
                         <li>
-                            <a jsf:id="endNoteLink" jsf:action="#{DatasetPage.fileDownloadService.downloadDatasetCitationXML(DatasetPage.dataset)}" >
-                                #{bundle['dataset.cite.downloadBtn.xml']}
-                            </a>
+                            <h:commandLink id="endNoteLink" value="#{bundle['dataset.cite.downloadBtn.xml']}" action="#{DatasetPage.fileDownloadService.downloadDatasetCitationXML(DatasetPage.dataset)}"/>
                         </li>
                         <li>
-                            <a jsf:id="risLink" jsf:actionListener="#{DatasetPage.fileDownloadService.downloadDatasetCitationRIS(DatasetPage.dataset)}">
-                                #{bundle['dataset.cite.downloadBtn.ris']}
-                            </a>
+                            <h:commandLink id="risLink" value="#{bundle['dataset.cite.downloadBtn.ris']}" action="#{DatasetPage.fileDownloadService.downloadDatasetCitationRIS(DatasetPage.dataset)}"/>
                         </li>
                         <li>
-                            <a jsf:id="bibLink" jsf:actionListener="#{DatasetPage.fileDownloadService.downloadDatasetCitationBibtex(DatasetPage.dataset)}" target="_blank">
-                                #{bundle['dataset.cite.downloadBtn.bib']}
-                            </a>
+                            <h:commandLink id="bibLink" value="#{bundle['dataset.cite.downloadBtn.bib']}" action="#{DatasetPage.fileDownloadService.downloadDatasetCitationBibtex(DatasetPage.dataset)}" target="_blank"/>
                         </li>
                     </ul>
                 </div>
diff --git a/src/main/webapp/editFilesFragment.xhtml b/src/main/webapp/editFilesFragment.xhtml
index 1a049331ae4..5fac8241f13 100644
--- a/src/main/webapp/editFilesFragment.xhtml
+++ b/src/main/webapp/editFilesFragment.xhtml
@@ -903,7 +903,7 @@
                                           filter="false">
                         <f:selectItems value="#{EditDatafilesPage.tabFileTags}" />
                         <p:ajax event="toggleSelect" listener="#{EditDatafilesPage.handleTabularTagsSelection}" update="tabularDataTags" />
-                        <p:ajax event="change" listener="#{EditDatafilesPage.TabularTagsSelection}" update="tabularDataTags" />
+                        <p:ajax event="change" listener="#{EditDatafilesPage.handleTabularTagsSelection}" update="tabularDataTags" />
                     </p:selectCheckboxMenu>
                     <p:message for="tabularDataTags" display="text" />
                 </div>
diff --git a/src/main/webapp/file-download-button-fragment.xhtml b/src/main/webapp/file-download-button-fragment.xhtml
index 4021ad7bc65..f28efc47705 100644
--- a/src/main/webapp/file-download-button-fragment.xhtml
+++ b/src/main/webapp/file-download-button-fragment.xhtml
@@ -74,7 +74,7 @@
                          styleClass="btn-download"
                          process="@this"
                          disabled="#{(fileMetadata.dataFile.ingestInProgress or lockedFromDownload) ? 'disabled' : ''}" 
-                         action="#{guestbookResponseService.modifyDatafileAndFormat(guestbookResponse, fileMetadata, 'GlobusTransfer')}"
+                         actionListener="#{guestbookResponseService.modifyDatafileAndFormat(guestbookResponse, fileMetadata, 'GlobusTransfer')}"
                          update="@widgetVar(downloadPopup)" oncomplete="PF('downloadPopup').show();handleResizeDialog('downloadPopup');">
                 <f:setPropertyActionListener target="#{fileMetadataForAction}" value="#{fileMetadata}" />
                 <!-- guest book or terms of use, etc. enabled - open "download popup" first: -->
@@ -101,7 +101,7 @@
                          styleClass="btn-download"
                          process="@this"
                          disabled="#{(fileMetadata.dataFile.ingestInProgress or lockedFromDownload) ? 'disabled' : ''}" 
-                         action="#{guestbookResponseService.modifyDatafileAndFormat(guestbookResponse, fileMetadata, 'package')}"
+                         actionListener="#{guestbookResponseService.modifyDatafileAndFormat(guestbookResponse, fileMetadata, 'package')}"
                          update="@widgetVar(downloadPopup)" oncomplete="PF('downloadPopup').show();handleResizeDialog('downloadPopup');">
                 <f:actionListener binding="#{packagePopupFragmentBean.setFileMetadata(fileMetadata)}" /> 
                 <!-- package data file: -->
@@ -123,7 +123,7 @@
                          styleClass="btn-download"
                          process="@this"
                          disabled="#{(fileMetadata.dataFile.ingestInProgress or lockedFromDownload) ? 'disabled' : ''}" 
-                         action="#{guestbookResponseService.modifyDatafile(guestbookResponse, fileMetadata)}"
+                         actionListener="#{guestbookResponseService.modifyDatafile(guestbookResponse, fileMetadata)}"
                          update="@widgetVar(downloadPopup)" oncomplete="PF('downloadPopup').show();handleResizeDialog('downloadPopup');">
                 <f:setPropertyActionListener target="#{fileMetadataForAction}" value="#{fileMetadata}" />
                 <!-- guest book or terms of use, etc. enabled - open "download popup" first: -->
@@ -141,7 +141,7 @@
                 </p:commandLink>
                 <p:commandLink styleClass="highlightBold btn-download" rendered="#{downloadPopupRequired}"
                                process="@this"
-                               action="#{guestbookResponseService.modifyDatafileAndFormat(guestbookResponse, fileMetadata, 'bundle' )}"
+                               actionListener="#{guestbookResponseService.modifyDatafileAndFormat(guestbookResponse, fileMetadata, 'bundle' )}"
                                update="@widgetVar(downloadPopup)"
                                oncomplete="PF('downloadPopup').show();handleResizeDialog('downloadPopup');">
                     #{bundle['file.downloadBtn.format.all']}
@@ -161,7 +161,7 @@
                 <p:commandLink styleClass="btn-download" rendered="#{downloadPopupRequired and !(fileMetadata.dataFile.originalFormatLabel == 'UNKNOWN')}"
                                process="@this"
                                disabled="#{(fileMetadata.dataFile.ingestInProgress or lockedFromDownload) ? 'disabled' : ''}" 
-                               action="#{guestbookResponseService.modifyDatafileAndFormat(guestbookResponse, fileMetadata, 'original' )}"
+                               actionListener="#{guestbookResponseService.modifyDatafileAndFormat(guestbookResponse, fileMetadata, 'original' )}"
                                update="@widgetVar(downloadPopup)"
                                oncomplete="PF('downloadPopup').show();handleResizeDialog('downloadPopup');">
                     <f:setPropertyActionListener target="#{fileMetadataForAction}" value="#{fileMetadata}" />
@@ -178,7 +178,7 @@
                     #{bundle['file.downloadBtn.format.tab']}
                 </p:commandLink>
                 <p:commandLink styleClass="btn-download" rendered="#{downloadPopupRequired}"
-                               action="#{guestbookResponseService.modifyDatafileAndFormat(guestbookResponse, fileMetadata, 'tab' )}"
+                               actionListener="#{guestbookResponseService.modifyDatafileAndFormat(guestbookResponse, fileMetadata, 'tab' )}"
                                disabled="#{(fileMetadata.dataFile.ingestInProgress or lockedFromDownload) ? 'disabled' : ''}" 
                                update="@widgetVar(downloadPopup)"
                                oncomplete="PF('downloadPopup').show();handleResizeDialog('downloadPopup');">
@@ -197,7 +197,7 @@
                     <p:commandLink styleClass="btn-download" rendered="#{downloadPopupRequired}"
                                    process="@this"
                                    disabled="#{(fileMetadata.dataFile.ingestInProgress or lockedFromDownload) ? 'disabled' : ''}" 
-                                   action="#{guestbookResponseService.modifyDatafileAndFormat(guestbookResponse, fileMetadata, 'RData' )}"
+                                   actionListener="#{guestbookResponseService.modifyDatafileAndFormat(guestbookResponse, fileMetadata, 'RData' )}"
                                    update="@widgetVar(downloadPopup)"
                                    oncomplete="PF('downloadPopup').show();handleResizeDialog('downloadPopup');">
                         <f:setPropertyActionListener target="#{fileMetadataForAction}" value="#{fileMetadata}" />
@@ -224,7 +224,7 @@
             <p:commandLink styleClass="btn-download" rendered="#{downloadPopupRequired}"
                            process="@this"
                            disabled="#{(fileMetadata.dataFile.ingestInProgress or lockedFromDownload) ? 'disabled' : ''}" 
-                           action="#{guestbookResponseService.modifyDatafileAndFormat(guestbookResponse, fileMetadata, 'var' )}"
+                           actionListener="#{guestbookResponseService.modifyDatafileAndFormat(guestbookResponse, fileMetadata, 'var' )}"
                            update="@widgetVar(downloadPopup)"
                            oncomplete="PF('downloadPopup').show();handleResizeDialog('downloadPopup');">
                 #{bundle['file.downloadBtn.format.var']}
@@ -311,7 +311,7 @@
                 </p:commandLink>
                 <!--The modifyDatafileAndFormat method below was added because on the dataset page, "tool" is null in the popup so we store it in the guestbookResponse because we know we'll need it later in the popup.-->
                 <p:commandLink rendered="#{downloadPopupRequired}"
-                               action="#{guestbookResponseService.modifyDatafileAndFormat(guestbookResponse, fileMetadata, 'externalTool', tool)}"
+                               actionListener="#{guestbookResponseService.modifyDatafileAndFormat(guestbookResponse, fileMetadata, 'externalTool', tool)}"
                                styleClass="btn-explore #{(fileMetadata.dataFile.ingestInProgress or lockedFromDownload) ? 'disabled' : ''}"
                                disabled="#{(fileMetadata.dataFile.ingestInProgress or lockedFromDownload)}"
                                process="@this"
@@ -340,7 +340,7 @@
                 </p:commandLink>
                 <!--The modifyDatafileAndFormat method below was added because on the dataset page, "tool" is null in the popup so we store it in the guestbookResponse because we know we'll need it later in the popup.-->
                 <p:commandLink rendered="#{downloadPopupRequired}"
-                               action="#{guestbookResponseService.modifyDatafileAndFormat(guestbookResponse, fileMetadata, 'externalTool', tool)}"
+                               actionListener="#{guestbookResponseService.modifyDatafileAndFormat(guestbookResponse, fileMetadata, 'externalTool', tool)}"
                                styleClass="btn-query #{(fileMetadata.dataFile.ingestInProgress or lockedFromDownload) ? 'disabled' : ''}"
                                disabled="#{(fileMetadata.dataFile.ingestInProgress or lockedFromDownload)}"
                                process="@this"
diff --git a/src/main/webapp/file-download-popup-fragment.xhtml b/src/main/webapp/file-download-popup-fragment.xhtml
index e1020c85e69..632c2a827ef 100644
--- a/src/main/webapp/file-download-popup-fragment.xhtml
+++ b/src/main/webapp/file-download-popup-fragment.xhtml
@@ -8,6 +8,8 @@
                 xmlns:jsf="http://xmlns.jcp.org/jsf"
                 xmlns:iqbs="http://xmlns.jcp.org/jsf/composite/iqbs">
 
+    <ui:fragment rendered="#{DatasetPage.editMode != 'CREATE'}">
+
         <o:importFunctions type="edu.harvard.iq.dataverse.util.MarkupChecker" />
         <p:focus context="guestbookUIFragment"/>
         <p class="help-block">
@@ -51,6 +53,7 @@
                 </p>
             </div>
         </div>
+        
 
         <ui:fragment rendered="#{empty workingVersion.termsOfUseAndAccess.license}">
             <div class="form-group"
@@ -297,4 +300,5 @@
                 #{bundle.cancel}
             </button>
         </div>
+    </ui:fragment>
 </ui:composition>
\ No newline at end of file
diff --git a/src/main/webapp/file-edit-button-fragment.xhtml b/src/main/webapp/file-edit-button-fragment.xhtml
index 8531f413b38..4dac1613266 100644
--- a/src/main/webapp/file-edit-button-fragment.xhtml
+++ b/src/main/webapp/file-edit-button-fragment.xhtml
@@ -25,7 +25,6 @@
     configureTools - for single file, list of configureTools for the file
     bean - the named value of the backing bean for the below method(s), also used by isFilePg param
     unrestrictFileAction - name of the method on the above bean to call for unrestrict (method must take a boolean)    
-    editFileAction - for selected files, name of method on the above bean to send for edit metadata
     refreshTagsPopoupAction - for selected files, name of method on the above bean to refresh tags popup
     -->            
     </ui:remove>
@@ -38,7 +37,7 @@
         <p:commandLink onclick="if (!(#{fileMetadata!=null} || testFilesSelected()))
                     return false;"                        
                        oncomplete="if(#{fileMetadata!=null}) window.location.assign('/editdatafiles.xhtml?selectedFileIds=#{fileMetadata.dataFile.id}&#38;datasetId=#{fileMetadata.datasetVersion.dataset.id}#{isFilePg?'&#38;referrer=FILE':''}&#38;version=#{fileMetadata.datasetVersion.version}')"
-                       action="#{bean[editFileAction]()}">
+                       action="#{bean.editFileMetadata()}">
             <h:outputText value="#{bundle['file.metadata']}"/>
         </p:commandLink>
     </li>
@@ -96,8 +95,7 @@
             <p:commandLink      update="@([id$=fileEmbargoPopup])" 
                                 onclick="if (!(#{fileMetadata!=null} || testFilesSelected()))
                                             return false;" 
-                                oncomplete="PF('fileEmbargoPopup').show();"
-                                action="#{bean[refreshEmbargoPopoupAction]()}">
+                                oncomplete="PF('fileEmbargoPopup').show();">
                                 <f:setPropertyActionListener target="#{fileMetadataForAction}" value="#{fileMetadata}" />
                 <h:outputText value="#{bundle['file.embargo']}"/>
             </p:commandLink> 
@@ -136,4 +134,4 @@
         </ui:include>		
     </ui:fragment>
   
-</ui:composition>
\ No newline at end of file
+</ui:composition>
diff --git a/src/main/webapp/file.xhtml b/src/main/webapp/file.xhtml
index 626a2580f98..5a60afef60c 100644
--- a/src/main/webapp/file.xhtml
+++ b/src/main/webapp/file.xhtml
@@ -110,19 +110,22 @@
                                                     </button>
                                                     <ul class="dropdown-menu">
                                                         <li>
-                                                            <a jsf:id="endNoteLink-2" jsf:action="#{FilePage.fileDownloadService.downloadCitationXML(FilePage.fileMetadata, null, FilePage.fileMetadata.dataFile.isIdentifierRegistered())}" >
-                                                                #{bundle['dataset.cite.downloadBtn.xml']}
-                                                            </a>
+                                                            <h:commandLink
+                                                                id="endNoteLink-2" value="#{bundle['dataset.cite.downloadBtn.xml']}"
+                                                                action="#{FilePage.fileDownloadService.downloadCitationXML(FilePage.fileMetadata, null, FilePage.fileMetadata.dataFile.isIdentifierRegistered())}"
+                                                            />
                                                         </li>
                                                         <li>
-                                                            <a jsf:id="risLink-2" jsf:actionListener="#{FilePage.fileDownloadService.downloadCitationRIS(FilePage.fileMetadata, null, FilePage.fileMetadata.dataFile.isIdentifierRegistered())}">
-                                                                #{bundle['dataset.cite.downloadBtn.ris']}
-                                                            </a>
+                                                            <h:commandLink
+                                                                id="risLink-2" value="#{bundle['dataset.cite.downloadBtn.ris']}"
+                                                                action="#{FilePage.fileDownloadService.downloadCitationRIS(FilePage.fileMetadata, null, FilePage.fileMetadata.dataFile.isIdentifierRegistered())}"
+                                                            />
                                                         </li>
                                                         <li>
-                                                            <a jsf:id="bibLink-2" jsf:actionListener="#{FilePage.fileDownloadService.downloadCitationBibtex(FilePage.fileMetadata, null, FilePage.fileMetadata.dataFile.isIdentifierRegistered())}" target="_blank">
-                                                                #{bundle['dataset.cite.downloadBtn.bib']}
-                                                            </a>
+                                                            <h:commandLink
+                                                                id="bibLink-2" value="#{bundle['dataset.cite.downloadBtn.bib']}" target="_blank"
+                                                                action="#{FilePage.fileDownloadService.downloadCitationBibtex(FilePage.fileMetadata, null, FilePage.fileMetadata.dataFile.isIdentifierRegistered())}"
+                                                            />
                                                         </li>
                                                     </ul>
                                                 </div>
@@ -156,19 +159,22 @@
                                                     </button>
                                                     <ul class="dropdown-menu">
                                                         <li>
-                                                            <a jsf:id="endNoteLink" jsf:action="#{FilePage.fileDownloadService.downloadDatasetCitationXML(FilePage.fileMetadata.datasetVersion.dataset)}" >
-                                                                #{bundle['dataset.cite.downloadBtn.xml']}
-                                                            </a>
+                                                            <h:commandLink
+                                                                id="endNoteLink" value="#{bundle['dataset.cite.downloadBtn.xml']}"
+                                                                action="#{FilePage.fileDownloadService.downloadDatasetCitationXML(FilePage.fileMetadata.datasetVersion.dataset)}"
+                                                            />
                                                         </li>
                                                         <li>
-                                                            <a jsf:id="risLink" jsf:actionListener="#{FilePage.fileDownloadService.downloadDatasetCitationRIS(FilePage.fileMetadata.datasetVersion.dataset)}">
-                                                                #{bundle['dataset.cite.downloadBtn.ris']}
-                                                            </a>
+                                                            <h:commandLink
+                                                                id="risLink" value="#{bundle['dataset.cite.downloadBtn.ris']}"
+                                                                action="#{FilePage.fileDownloadService.downloadDatasetCitationRIS(FilePage.fileMetadata.datasetVersion.dataset)}"
+                                                            />
                                                         </li>
                                                         <li>
-                                                            <a jsf:id="bibLink" jsf:actionListener="#{FilePage.fileDownloadService.downloadDatasetCitationBibtex(FilePage.fileMetadata.datasetVersion.dataset)}" target="_blank">
-                                                                #{bundle['dataset.cite.downloadBtn.bib']}
-                                                            </a>
+                                                            <h:commandLink
+                                                                id="bibLink" value="#{bundle['dataset.cite.downloadBtn.bib']}" target="_blank"
+                                                                action="#{FilePage.fileDownloadService.downloadDatasetCitationBibtex(FilePage.fileMetadata.datasetVersion.dataset)}"
+                                                            />
                                                         </li>
                                                     </ul>
                                                 </div>
diff --git a/src/main/webapp/filesFragment.xhtml b/src/main/webapp/filesFragment.xhtml
index cdb6af88cd3..6d3c6062ec7 100644
--- a/src/main/webapp/filesFragment.xhtml
+++ b/src/main/webapp/filesFragment.xhtml
@@ -421,7 +421,6 @@
                             <ui:param name="fileMetadataForAction" value="#{DatasetPage.fileMetadataForAction}"/>
                             <ui:param name="bean" value="#{DatasetPage}"/>
                             <ui:param name="unrestrictFileAction" value="restrictFiles"/>
-                            <ui:param name="editFileAction" value="editFileMetadata"/>
                             <ui:param name="refreshTagsPopoupAction" value="refreshTagsPopUp"/>                          
                         </ui:include>  
                     </ul>
diff --git a/src/main/webapp/resources/css/structure.css b/src/main/webapp/resources/css/structure.css
index f2eaad4b2c3..470c07d4534 100644
--- a/src/main/webapp/resources/css/structure.css
+++ b/src/main/webapp/resources/css/structure.css
@@ -67,7 +67,7 @@ tr.ui-state-highlight, .ui-widget-content tr.ui-state-highlight, .ui-widget-head
 tr.ui-state-highlight label, .ui-widget-content tr.ui-state-highlight label, .ui-widget-header tr.ui-state-highlight label {color: #333333;}
 tr.ui-state-highlight a:not(.btn), .ui-widget-content tr.ui-state-highlight a:not(.btn), .ui-widget-header tr.ui-state-highlight a:not(.btn) {color: #428bca;}
 tr.ui-state-highlight .ui-icon {
-    background-image: url("/javax.faces.resource/images/ui-icons_333333_256x240.png.xhtml?ln=primefaces-bootstrap");
+    background-image: url("/jakarta.faces.resource/images/ui-icons_333333_256x240.png.xhtml?ln=primefaces-bootstrap");
 }
 td.col-select-width, th.col-select-width {width:36px;}
 
diff --git a/src/test/java/edu/harvard/iq/dataverse/AuxiliaryFileServiceBeanTest.java b/src/test/java/edu/harvard/iq/dataverse/AuxiliaryFileServiceBeanTest.java
index ad97eba137c..30bd260eb1b 100644
--- a/src/test/java/edu/harvard/iq/dataverse/AuxiliaryFileServiceBeanTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/AuxiliaryFileServiceBeanTest.java
@@ -2,16 +2,14 @@
 
 import java.util.Arrays;
 import java.util.List;
-import javax.persistence.EntityManager;
-import javax.persistence.Query;
-import javax.persistence.TypedQuery;
+import jakarta.persistence.EntityManager;
+import jakarta.persistence.TypedQuery;
 
-import static org.junit.Assert.assertEquals;
-import org.junit.Test;
-import org.junit.Before;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.BeforeEach;
 import org.junit.jupiter.api.extension.ExtendWith;
 import org.mockito.ArgumentMatchers;
-import org.mockito.Matchers;
 
 import static org.mockito.Mockito.mock;
 import static org.mockito.Mockito.when;
@@ -26,7 +24,7 @@ public class AuxiliaryFileServiceBeanTest {
     List<String> types;
     DataFile dataFile;
 
-    @Before
+    @BeforeEach
     public void setup() {
         svc = new AuxiliaryFileServiceBean();
         svc.em = mock(EntityManager.class);
diff --git a/src/test/java/edu/harvard/iq/dataverse/CartTest.java b/src/test/java/edu/harvard/iq/dataverse/CartTest.java
index 7b7ea3331a9..e847cfb3346 100644
--- a/src/test/java/edu/harvard/iq/dataverse/CartTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/CartTest.java
@@ -1,15 +1,15 @@
 package edu.harvard.iq.dataverse;
 
-import static org.junit.Assert.fail;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertTrue;
-
 import java.util.List;
 import java.util.Map.Entry;
 
-import org.junit.After;
-import org.junit.Before;
-import org.junit.Test;
+import org.junit.jupiter.api.AfterEach;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
+
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertTrue;
+import static org.junit.jupiter.api.Assertions.fail;
 
 public class CartTest {
 
@@ -17,14 +17,14 @@ public class CartTest {
     private String title;
     private String persistentId;
 
-    @Before
+    @BeforeEach
     public void setUp() {
         this.cart = new Cart();
         this.title = "title";
         this.persistentId = "persistentId";
     }
 
-    @After
+    @AfterEach
     public void tearDwon() {
         this.cart = null;
         this.title = null;
diff --git a/src/test/java/edu/harvard/iq/dataverse/DataFileCategoryServiceBeanTest.java b/src/test/java/edu/harvard/iq/dataverse/DataFileCategoryServiceBeanTest.java
index edeeea288bf..53add343f4f 100644
--- a/src/test/java/edu/harvard/iq/dataverse/DataFileCategoryServiceBeanTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/DataFileCategoryServiceBeanTest.java
@@ -4,12 +4,14 @@
 import edu.harvard.iq.dataverse.util.BundleUtil;
 import org.hamcrest.MatcherAssert;
 import org.hamcrest.Matchers;
-import org.junit.Test;
-import org.junit.runner.RunWith;
+
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.extension.ExtendWith;
+
 import org.mockito.InjectMocks;
 import org.mockito.Mock;
 import org.mockito.Mockito;
-import org.mockito.junit.MockitoJUnitRunner;
+import org.mockito.junit.jupiter.MockitoExtension;
 
 import java.util.Arrays;
 import java.util.List;
@@ -19,7 +21,7 @@
  * 
  * @author adaybujeda
  */
-@RunWith(MockitoJUnitRunner.class)
+@ExtendWith(MockitoExtension.class)
 public class DataFileCategoryServiceBeanTest {
 
     @Mock
diff --git a/src/test/java/edu/harvard/iq/dataverse/DataFileServiceBeanTest.java b/src/test/java/edu/harvard/iq/dataverse/DataFileServiceBeanTest.java
index 136916cf449..ab3d0f8ef55 100644
--- a/src/test/java/edu/harvard/iq/dataverse/DataFileServiceBeanTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/DataFileServiceBeanTest.java
@@ -1,8 +1,9 @@
 package edu.harvard.iq.dataverse;
 
-import org.junit.Before;
-import org.junit.Test;
-import static org.junit.Assert.*;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
+
+import static org.junit.jupiter.api.Assertions.*;
 
 /**
  * Test that the DataFileServiceBean classifies DataFiles correctly.
@@ -27,7 +28,7 @@ public DataFileServiceBeanTest() {
     private DataFileServiceBean dataFileServiceBean;
             
     
-    @Before
+    @BeforeEach
     public void setUp() {
         fileWoContentType = createDataFile(null);
         fileWithBogusContentType = createDataFile("foo/bar");
diff --git a/src/test/java/edu/harvard/iq/dataverse/DatasetAuthorTest.java b/src/test/java/edu/harvard/iq/dataverse/DatasetAuthorTest.java
index fddb95eda9e..fa225de18ba 100644
--- a/src/test/java/edu/harvard/iq/dataverse/DatasetAuthorTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/DatasetAuthorTest.java
@@ -1,46 +1,26 @@
 package edu.harvard.iq.dataverse;
 
-import static org.junit.Assert.assertEquals;
+import org.junit.jupiter.params.ParameterizedTest;
+import org.junit.jupiter.params.provider.CsvSource;
 
-import java.util.Arrays;
-import java.util.Collection;
+import static org.junit.jupiter.api.Assertions.assertEquals;
 
-import org.junit.Test;
-import org.junit.runner.RunWith;
-import org.junit.runners.Parameterized;
-import org.junit.runners.Parameterized.Parameters;
-
-@RunWith(Parameterized.class)
 public class DatasetAuthorTest {
 
-    public String idType;
-    public String idValue;
-    public String expectedIdentifierAsUrl;
-
-    public DatasetAuthorTest(String idType, String idValue, String expectedIdentifierAsUrl) {
-        this.idType = idType;
-        this.idValue = idValue;
-        this.expectedIdentifierAsUrl = expectedIdentifierAsUrl;
-    }
-
-    @Parameters
-    public static Collection<String[]> parameters() {
-        return Arrays.asList(new String[][] {
-            { "ORCID", "0000-0002-1825-0097", "https://orcid.org/0000-0002-1825-0097" },
-            { "ISNI", "0000000121032683", "http://www.isni.org/isni/0000000121032683"},
-            { "LCNA", "n82058243", "http://id.loc.gov/authorities/names/n82058243" },
-            { "VIAF", "172389567", "https://viaf.org/viaf/172389567" },
-            { "GND", "4079154-3", "https://d-nb.info/gnd/4079154-3" },
-            { "ResearcherID", "634082", "https://publons.com/researcher/634082/" },
-            { "ResearcherID", "AAW-9289-2021", "https://publons.com/researcher/AAW-9289-2021/" },
-            { "ResearcherID", "J-9733-2013", "https://publons.com/researcher/J-9733-2013/" },
-            { "ScopusID", "6602344670", "https://www.scopus.com/authid/detail.uri?authorId=6602344670" },
-            { null, null, null, },
-        });
-    }
-
-    @Test
-    public void getIdentifierAsUrl() {
+    @ParameterizedTest
+    @CsvSource(value = {
+        "ORCID,0000-0002-1825-0097,https://orcid.org/0000-0002-1825-0097",
+        "ISNI,0000000121032683,http://www.isni.org/isni/0000000121032683",
+        "LCNA,n82058243,http://id.loc.gov/authorities/names/n82058243",
+        "VIAF,172389567,https://viaf.org/viaf/172389567",
+        "GND,4079154-3,https://d-nb.info/gnd/4079154-3",
+        "ResearcherID,634082,https://publons.com/researcher/634082/",
+        "ResearcherID,AAW-9289-2021,https://publons.com/researcher/AAW-9289-2021/",
+        "ResearcherID,J-9733-2013,https://publons.com/researcher/J-9733-2013/",
+        "ScopusID,6602344670,https://www.scopus.com/authid/detail.uri?authorId=6602344670",
+        "NULL,NULL,NULL"
+    }, nullValues = "NULL")
+    void getIdentifierAsUrl(String idType, String idValue, String expectedIdentifierAsUrl) {
         DatasetAuthor datasetAuthor = new DatasetAuthor();
         if (idType !=null && idValue != null) {
             datasetAuthor.setIdType(idType);
diff --git a/src/test/java/edu/harvard/iq/dataverse/DatasetFieldTypeTest.java b/src/test/java/edu/harvard/iq/dataverse/DatasetFieldTypeTest.java
index ed17bd229d9..a235c9b0061 100644
--- a/src/test/java/edu/harvard/iq/dataverse/DatasetFieldTypeTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/DatasetFieldTypeTest.java
@@ -6,16 +6,13 @@
 package edu.harvard.iq.dataverse;
 
 import edu.harvard.iq.dataverse.search.SolrField;
-import java.util.Collection;
-import java.util.List;
-import java.util.Set;
-import javax.faces.model.SelectItem;
-import org.junit.After;
-import org.junit.AfterClass;
-import org.junit.Before;
-import org.junit.BeforeClass;
-import org.junit.Test;
-import static org.junit.Assert.*;
+import org.junit.jupiter.api.AfterAll;
+import org.junit.jupiter.api.AfterEach;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
+
+import static org.junit.jupiter.api.Assertions.*;
 
 /**
  *
@@ -26,19 +23,19 @@ public class DatasetFieldTypeTest {
     public DatasetFieldTypeTest() {
     }
     
-    @BeforeClass
+    @BeforeAll
     public static void setUpClass() {
     }
     
-    @AfterClass
+    @AfterAll
     public static void tearDownClass() {
     }
     
-    @Before
+    @BeforeEach
     public void setUp() {
     }
     
-    @After
+    @AfterEach
     public void tearDown() {
     }
 
diff --git a/src/test/java/edu/harvard/iq/dataverse/DatasetFieldValidatorTest.java b/src/test/java/edu/harvard/iq/dataverse/DatasetFieldValidatorTest.java
index 99482dd9401..659e42a68c4 100644
--- a/src/test/java/edu/harvard/iq/dataverse/DatasetFieldValidatorTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/DatasetFieldValidatorTest.java
@@ -5,13 +5,14 @@
  */
 package edu.harvard.iq.dataverse;
 
-import javax.validation.ConstraintValidatorContext;
-import org.junit.After;
-import org.junit.AfterClass;
-import org.junit.Before;
-import org.junit.BeforeClass;
-import org.junit.Test;
-import static org.junit.Assert.*;
+import jakarta.validation.ConstraintValidatorContext;
+import org.junit.jupiter.api.AfterEach;
+import org.junit.jupiter.api.AfterAll;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
+import static org.junit.jupiter.api.Assertions.*;
+
 import org.mockito.Mockito;
 
 /**
@@ -25,25 +26,26 @@ public class DatasetFieldValidatorTest {
     public DatasetFieldValidatorTest() {
     }
     
-    @BeforeClass
+    @BeforeAll
     public static void setUpClass() {
     }
     
-    @AfterClass
+    @AfterAll
     public static void tearDownClass() {
     }
     
-    @Before
+    @BeforeEach
     public void setUp() {
     }
     
-    @After
+    @AfterEach
     public void tearDown() {
     }
 
 
     /**
      * Test of isValid method, of class DatasetFieldValidator.
+     * TODO: this should be converted into one or two ParameterizedTest methods, potentially including a DisplayNameGenerator
      */
     @Test
     public void testIsValid() {
@@ -84,7 +86,7 @@ private void testPrimitiveDatasetField(String test, boolean required, String val
         testDatasetField.setSingleValue(value);
         
         DatasetFieldValidator datasetFieldValidator = new DatasetFieldValidator();
-        assertEquals( test, expectedOutcome, datasetFieldValidator.isValid(testDatasetField, constraintValidatorContext));
+        assertEquals(expectedOutcome, datasetFieldValidator.isValid(testDatasetField, constraintValidatorContext), test);
        
     }
       
@@ -120,7 +122,7 @@ private void testCompoundDatasetField(String test, boolean requiredParent, boole
         
 
         DatasetFieldValidator datasetFieldValidator = new DatasetFieldValidator();
-        assertEquals( test, expectedOutcome, datasetFieldValidator.isValid(child1DatasetField, constraintValidatorContext));
+        assertEquals(expectedOutcome, datasetFieldValidator.isValid(child1DatasetField, constraintValidatorContext), test);
     }
     
     @Test
diff --git a/src/test/java/edu/harvard/iq/dataverse/DatasetFieldValueValidatorTest.java b/src/test/java/edu/harvard/iq/dataverse/DatasetFieldValueValidatorTest.java
index ceaa69ade4e..f4af88818a5 100644
--- a/src/test/java/edu/harvard/iq/dataverse/DatasetFieldValueValidatorTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/DatasetFieldValueValidatorTest.java
@@ -7,10 +7,10 @@
 
 import java.util.Set;
 import java.util.regex.Pattern;
-import javax.validation.ConstraintValidatorContext;
-import javax.validation.ConstraintViolation;
-import javax.validation.Validation;
-import javax.validation.Validator;
+import jakarta.validation.ConstraintValidatorContext;
+import jakarta.validation.ConstraintViolation;
+import jakarta.validation.Validation;
+import jakarta.validation.Validator;
 
 import org.junit.jupiter.api.Test;
 import org.junit.jupiter.params.ParameterizedTest;
diff --git a/src/test/java/edu/harvard/iq/dataverse/DatasetTest.java b/src/test/java/edu/harvard/iq/dataverse/DatasetTest.java
index 45e47460ffe..2153a336303 100644
--- a/src/test/java/edu/harvard/iq/dataverse/DatasetTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/DatasetTest.java
@@ -3,10 +3,11 @@
 import edu.harvard.iq.dataverse.DatasetVersion.VersionState;
 import edu.harvard.iq.dataverse.mocks.MocksFactory;
 
-import org.junit.After;
-import org.junit.Before;
-import org.junit.Test;
-import static org.junit.Assert.*;
+import org.junit.jupiter.api.AfterEach;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
+
+import static org.junit.jupiter.api.Assertions.*;
 
 import java.util.ArrayList;
 import java.util.List;
@@ -23,38 +24,38 @@ public class DatasetTest {
     @Test
     public void testIsLockedFor() {
         Dataset sut = new Dataset();
-        assertFalse( "Initially verify that the dataset is not locked because data being ingested", sut.isLockedFor(DatasetLock.Reason.Ingest) );
+        assertFalse(sut.isLockedFor(DatasetLock.Reason.Ingest), "Initially verify that the dataset is not locked because data being ingested");
 
         DatasetLock dl = new DatasetLock(DatasetLock.Reason.Ingest, MocksFactory.makeAuthenticatedUser("jane", "doe"));
         sut.addLock(dl);
-        assertTrue( "Verify that the dataset now has an ingest lock", sut.isLockedFor(DatasetLock.Reason.Ingest) );
-        assertFalse( "Verify that the dataset does not have a workflow lock", sut.isLockedFor(DatasetLock.Reason.Workflow) );
+        assertTrue(sut.isLockedFor(DatasetLock.Reason.Ingest), "Verify that the dataset now has an ingest lock");
+        assertFalse(sut.isLockedFor(DatasetLock.Reason.Workflow), "Verify that the dataset does not have a workflow lock");
     }
     
     @Test
     public void testLocksManagement() {
         Dataset sut = new Dataset();
-        assertFalse( "Initially verify that the dataset is not locked", sut.isLocked() );
+        assertFalse(sut.isLocked(), "Initially verify that the dataset is not locked");
         
         DatasetLock dlIngest = new DatasetLock(DatasetLock.Reason.Ingest, MocksFactory.makeAuthenticatedUser("jane", "doe"));
         dlIngest.setId(MocksFactory.nextId());
         sut.addLock(dlIngest);
-        assertTrue( "After adding an ingest lock, verify that the dataset is locked", sut.isLocked() );
+        assertTrue(sut.isLocked(), "After adding an ingest lock, verify that the dataset is locked");
 
         final DatasetLock dlInReview = new DatasetLock(DatasetLock.Reason.InReview, MocksFactory.makeAuthenticatedUser("jane", "doe"));
         dlInReview.setId(MocksFactory.nextId());
         sut.addLock(dlInReview);
-        assertEquals( "After adding a review lock, verify that the dataset is locked by two locks", 2, sut.getLocks().size() );
+        assertEquals(2, sut.getLocks().size(), "After adding a review lock, verify that the dataset is locked by two locks");
         
         DatasetLock retrievedDl = sut.getLockFor(DatasetLock.Reason.Ingest);
         assertEquals( dlIngest, retrievedDl );
         sut.removeLock(dlIngest);
-        assertNull( "After removing the ingest lock, verify that the dataset does not have any ingest locks", sut.getLockFor(DatasetLock.Reason.Ingest) );
+        assertNull(sut.getLockFor(DatasetLock.Reason.Ingest), "After removing the ingest lock, verify that the dataset does not have any ingest locks");
         
-        assertTrue( "After removing the ingest lock, verify that the dataset is still locked (review lock)", sut.isLocked() );
+        assertTrue(sut.isLocked(), "After removing the ingest lock, verify that the dataset is still locked (review lock)");
         
         sut.removeLock(dlInReview);
-        assertFalse( "After removing the review lock, verify that the dataset is not locked anymore", sut.isLocked() );
+        assertFalse(sut.isLocked(), "After removing the review lock, verify that the dataset is not locked anymore");
         
     }
 
@@ -78,7 +79,7 @@ public void testLocksManagement() {
     private DatasetVersion draftVersion;
     private DatasetVersion releasedVersion;
 
-    @Before
+    @BeforeEach
     public void before() {
         this.archivedVersion = new DatasetVersion();
         this.archivedVersion.setVersionState(VersionState.ARCHIVED);
@@ -93,7 +94,7 @@ public void before() {
         this.releasedVersion.setVersionState(VersionState.RELEASED);
     }
 
-    @After
+    @AfterEach
     public void after() {
         this.archivedVersion = null;
         this.deaccessionedVersion = null;
diff --git a/src/test/java/edu/harvard/iq/dataverse/DatasetVersionServiceBeanTest.java b/src/test/java/edu/harvard/iq/dataverse/DatasetVersionServiceBeanTest.java
index 7c608888b7b..424ff54fe02 100644
--- a/src/test/java/edu/harvard/iq/dataverse/DatasetVersionServiceBeanTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/DatasetVersionServiceBeanTest.java
@@ -1,13 +1,13 @@
 package edu.harvard.iq.dataverse;
 
-import static org.junit.Assert.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertEquals;
 import static org.mockito.Mockito.mock;
 
 import java.util.Arrays;
 
-import org.junit.After;
-import org.junit.Before;
-import org.junit.Test;
+import org.junit.jupiter.api.AfterEach;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
 import org.mockito.Mockito;
 
 import edu.harvard.iq.dataverse.authorization.AuthenticatedUserDisplayInfo;
@@ -18,13 +18,13 @@ public class DatasetVersionServiceBeanTest {
 
   private DatasetVersionServiceBean datasetVersionServiceBean;
 
-  @Before
+  @BeforeEach
   public void setUp() {
     this.datasetVersionServiceBean = new DatasetVersionServiceBean();
     
   }
 
-  @After
+  @AfterEach
   public void tearDown() {
     this.datasetVersionServiceBean = null;
   }
diff --git a/src/test/java/edu/harvard/iq/dataverse/DatasetVersionTest.java b/src/test/java/edu/harvard/iq/dataverse/DatasetVersionTest.java
index a8e011d0036..4cd6c4dfaa7 100644
--- a/src/test/java/edu/harvard/iq/dataverse/DatasetVersionTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/DatasetVersionTest.java
@@ -14,10 +14,10 @@
 import java.util.Date;
 import java.util.List;
 import java.util.logging.Logger;
-import javax.json.Json;
-import javax.json.JsonArray;
-import javax.json.JsonObject;
-import javax.json.JsonReader;
+import jakarta.json.Json;
+import jakarta.json.JsonArray;
+import jakarta.json.JsonObject;
+import jakarta.json.JsonReader;
 import static org.junit.jupiter.api.Assertions.assertEquals;
 import static org.junit.jupiter.api.Assertions.assertFalse;
 import static org.junit.jupiter.api.Assertions.assertTrue;
diff --git a/src/test/java/edu/harvard/iq/dataverse/DataverseMetadataBlockFacetTest.java b/src/test/java/edu/harvard/iq/dataverse/DataverseMetadataBlockFacetTest.java
index 7ae2d26a113..e0bcfb2369d 100644
--- a/src/test/java/edu/harvard/iq/dataverse/DataverseMetadataBlockFacetTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/DataverseMetadataBlockFacetTest.java
@@ -3,7 +3,7 @@
 import edu.harvard.iq.dataverse.mocks.MocksFactory;
 import org.hamcrest.MatcherAssert;
 import org.hamcrest.Matchers;
-import org.junit.Test;
+import org.junit.jupiter.api.Test;
 
 /**
  *
diff --git a/src/test/java/edu/harvard/iq/dataverse/DataverseTest.java b/src/test/java/edu/harvard/iq/dataverse/DataverseTest.java
index cb0561dd0f4..4e2bd5b3c2d 100644
--- a/src/test/java/edu/harvard/iq/dataverse/DataverseTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/DataverseTest.java
@@ -3,8 +3,8 @@
 import edu.harvard.iq.dataverse.mocks.MocksFactory;
 import org.hamcrest.MatcherAssert;
 import org.hamcrest.Matchers;
-import org.junit.Before;
-import org.junit.Test;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
 
 import java.util.Arrays;
 import java.util.List;
@@ -18,7 +18,7 @@ public class DataverseTest {
     private Dataverse OWNER;
     private List<DataverseMetadataBlockFacet> OWNER_METADATABLOCKFACETS;
 
-    @Before
+    @BeforeEach
     public void beforeEachTest() {
         OWNER = new Dataverse();
         OWNER.setId(MocksFactory.nextId());
diff --git a/src/test/java/edu/harvard/iq/dataverse/EditDataFilesPageHelperTest.java b/src/test/java/edu/harvard/iq/dataverse/EditDataFilesPageHelperTest.java
index c95f7f105fa..39d43fec191 100644
--- a/src/test/java/edu/harvard/iq/dataverse/EditDataFilesPageHelperTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/EditDataFilesPageHelperTest.java
@@ -3,12 +3,12 @@
 import edu.harvard.iq.dataverse.util.file.CreateDataFileResult;
 import org.hamcrest.MatcherAssert;
 import org.hamcrest.Matchers;
-import org.junit.Test;
-import org.junit.runner.RunWith;
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.extension.ExtendWith;
 import org.mockito.InjectMocks;
 import org.mockito.Mock;
 import org.mockito.Mockito;
-import org.mockito.junit.MockitoJUnitRunner;
+import org.mockito.junit.jupiter.MockitoExtension;
 
 import java.util.Arrays;
 import java.util.Collections;
@@ -19,7 +19,7 @@
  *
  * @author adaybujeda
  */
-@RunWith(MockitoJUnitRunner.class)
+@ExtendWith(MockitoExtension.class)
 public class EditDataFilesPageHelperTest {
 
     private static final String FILENAME = UUID.randomUUID().toString();
diff --git a/src/test/java/edu/harvard/iq/dataverse/ExternalIdentifierTest.java b/src/test/java/edu/harvard/iq/dataverse/ExternalIdentifierTest.java
index c14d2e4086e..dbd732d2e55 100644
--- a/src/test/java/edu/harvard/iq/dataverse/ExternalIdentifierTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/ExternalIdentifierTest.java
@@ -1,11 +1,9 @@
 package edu.harvard.iq.dataverse;
 
-import org.junit.Test;
+import org.junit.jupiter.api.Test;
 
-import java.util.regex.Pattern;
-
-import static org.junit.Assert.assertFalse;
-import static org.junit.Assert.assertTrue;
+import static org.junit.jupiter.api.Assertions.*;
+import static org.junit.jupiter.api.Assertions.assertTrue;
 
 public class ExternalIdentifierTest {
 
diff --git a/src/test/java/edu/harvard/iq/dataverse/FileDirectoryNameValidatorTest.java b/src/test/java/edu/harvard/iq/dataverse/FileDirectoryNameValidatorTest.java
index 9d49dcdb070..5ff74aea603 100644
--- a/src/test/java/edu/harvard/iq/dataverse/FileDirectoryNameValidatorTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/FileDirectoryNameValidatorTest.java
@@ -1,39 +1,25 @@
 package edu.harvard.iq.dataverse;
 
-import java.util.Arrays;
-import java.util.Collection;
-import org.junit.Test;
-import static org.junit.Assert.*;
-import org.junit.runner.RunWith;
-import org.junit.runners.Parameterized;
+import org.junit.jupiter.params.ParameterizedTest;
+import org.junit.jupiter.params.provider.CsvSource;
 
-@RunWith(Parameterized.class)
-public class FileDirectoryNameValidatorTest {
-
-    public boolean isValid;
-    public String fileDirectoryName;
-
-    public FileDirectoryNameValidatorTest(boolean isValid, String fileDirectoryName) {
-        this.isValid = isValid;
-        this.fileDirectoryName = fileDirectoryName;
-    }
+import static org.junit.jupiter.api.Assertions.assertEquals;
 
-    @Parameterized.Parameters
-    public static Collection<Object[]> parameters() {
-        return Arrays.asList(new Object[][]{
-            {true, "foobar"},
-            // The leading "-" gets stripped.
-            {true, "-foobar"},
-            {true, "_foobar"},
-            {true, "foobar_"},
-            {true, "folder/sub"},
-            {true, "folder///sub"},
-            {true, "folder///sub/third"},
-            {false, "f**bar"},});
-    }
+public class FileDirectoryNameValidatorTest {
 
-    @Test
-    public void testIsFileDirectoryNameValid() {
+    @ParameterizedTest
+    @CsvSource({
+        "true,foobar",
+        // The leading "-" gets stripped.
+        "true,-foobar",
+        "true,_foobar",
+        "true,foobar_",
+        "true,folder/sub",
+        "true,folder///sub",
+        "true,folder///sub/third",
+        "false,f**bar"
+    })
+    public void testIsFileDirectoryNameValid(boolean isValid, String fileDirectoryName) {
         assertEquals(isValid, FileDirectoryNameValidator.isFileDirectoryNameValid(fileDirectoryName, null));
     }
 
diff --git a/src/test/java/edu/harvard/iq/dataverse/GlobalIdTest.java b/src/test/java/edu/harvard/iq/dataverse/GlobalIdTest.java
index 2b60f2a2864..394f08c6e93 100644
--- a/src/test/java/edu/harvard/iq/dataverse/GlobalIdTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/GlobalIdTest.java
@@ -1,23 +1,21 @@
 package edu.harvard.iq.dataverse;
 
-import org.junit.Test;
-import static org.junit.Assert.*;
-
-import org.junit.Ignore;
-import org.junit.Rule;
-import org.junit.rules.ExpectedException;
+import org.junit.jupiter.api.Disabled;
+import org.junit.jupiter.api.Test;
 
 import edu.harvard.iq.dataverse.pidproviders.PidUtil;
 
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertFalse;
+import static org.junit.jupiter.api.Assertions.assertNull;
+import static org.junit.jupiter.api.Assertions.assertTrue;
+
 /**
  *
  * @author rmp553
  */
 public class GlobalIdTest {
 
-    @Rule
-    public ExpectedException exception = ExpectedException.none();
-
     @Test
     public void testValidDOI() {
         System.out.println("testValidDOI");
@@ -68,34 +66,34 @@ public void testInject() {
     }
 
     @Test
-    @Ignore /* Could now add a 'doy' protocol so the test would have to check against registered PIDProviders (currently Beans)*/
+    @Disabled /* Could now add a 'doy' protocol so the test would have to check against registered PIDProviders (currently Beans)*/
     public void testUnknownProtocol() {
         System.out.println("testUnknownProtocol");
 
         String badProtocol = "doy:10.5072/FK2/BYM3IW";
-
-        exception.expect(IllegalArgumentException.class);
-        exception.expectMessage("Failed to parse identifier: " + badProtocol);
+        
+        //exception.expect(IllegalArgumentException.class);
+        //exception.expectMessage("Failed to parse identifier: " + badProtocol);
         //new GlobalId(badProtocol);
     }
 
     @Test
-    @Ignore /* Could now change parsing rules so the test would have to check against registered PIDProviders (currently Beans)*/
+    @Disabled /* Could now change parsing rules so the test would have to check against registered PIDProviders (currently Beans)*/
     public void testBadIdentifierOnePart() {
         System.out.println("testBadIdentifierOnePart");
 
-        exception.expect(IllegalArgumentException.class);
-        exception.expectMessage("Failed to parse identifier: 1part");
+        //exception.expect(IllegalArgumentException.class);
+        //exception.expectMessage("Failed to parse identifier: 1part");
         //new GlobalId("1part");
     }
 
     @Test
-    @Ignore /* Could now change parsing rules so the test would have to check against registered PIDProviders (currently Beans)*/
+    @Disabled /* Could now change parsing rules so the test would have to check against registered PIDProviders (currently Beans)*/
     public void testBadIdentifierTwoParts() {
         System.out.println("testBadIdentifierTwoParts");
 
-        exception.expect(IllegalArgumentException.class);
-        exception.expectMessage("Failed to parse identifier: doi:2part/blah");
+        //exception.expect(IllegalArgumentException.class);
+        //exception.expectMessage("Failed to parse identifier: doi:2part/blah");
         //new GlobalId("doi:2part/blah");
     }
 
diff --git a/src/test/java/edu/harvard/iq/dataverse/MailServiceBeanTest.java b/src/test/java/edu/harvard/iq/dataverse/MailServiceBeanTest.java
index 025e3fbb3f7..32bf9702ee7 100644
--- a/src/test/java/edu/harvard/iq/dataverse/MailServiceBeanTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/MailServiceBeanTest.java
@@ -7,11 +7,11 @@
 import org.junit.jupiter.params.ParameterizedTest;
 import org.junit.jupiter.params.provider.CsvSource;
 
-import javax.mail.internet.InternetAddress;
+import jakarta.mail.internet.InternetAddress;
 
 import java.io.UnsupportedEncodingException;
 
-import static org.junit.jupiter.api.Assertions.*;
+import static org.junit.jupiter.api.Assertions.assertTrue;
 
 class MailServiceBeanTest {
     
diff --git a/src/test/java/edu/harvard/iq/dataverse/MetadataBlockTest.java b/src/test/java/edu/harvard/iq/dataverse/MetadataBlockTest.java
index 85aaa37bb30..8644de89709 100644
--- a/src/test/java/edu/harvard/iq/dataverse/MetadataBlockTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/MetadataBlockTest.java
@@ -3,7 +3,7 @@
 import edu.harvard.iq.dataverse.mocks.MocksFactory;
 import org.hamcrest.MatcherAssert;
 import org.hamcrest.Matchers;
-import org.junit.Test;
+import org.junit.jupiter.api.Test;
 import org.mockito.Mockito;
 
 import java.util.UUID;
diff --git a/src/test/java/edu/harvard/iq/dataverse/NonEssentialTests.java b/src/test/java/edu/harvard/iq/dataverse/NonEssentialTests.java
deleted file mode 100644
index 612904cbd26..00000000000
--- a/src/test/java/edu/harvard/iq/dataverse/NonEssentialTests.java
+++ /dev/null
@@ -1,10 +0,0 @@
-package edu.harvard.iq.dataverse;
-
-/**
- * Tests annotated as non-essential will not be run by default on developers'
- * laptops but they will run on continuous integration platforms like Travis CI.
- * To work on one of these tests, you have to comment out the annotation.
- */
-public interface NonEssentialTests {
-
-}
diff --git a/src/test/java/edu/harvard/iq/dataverse/PermissionsWrapperTest.java b/src/test/java/edu/harvard/iq/dataverse/PermissionsWrapperTest.java
index 7ebc7e3c807..751a90fb447 100644
--- a/src/test/java/edu/harvard/iq/dataverse/PermissionsWrapperTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/PermissionsWrapperTest.java
@@ -1,12 +1,12 @@
 package edu.harvard.iq.dataverse;
 
-import static org.junit.Assert.assertFalse;
+import static org.junit.jupiter.api.Assertions.*;
 import static org.junit.jupiter.api.Assertions.assertTrue;
 import static org.mockito.Mockito.mock;
 
-import org.junit.After;
-import org.junit.Before;
-import org.junit.Test;
+import org.junit.jupiter.api.AfterEach;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
 import org.mockito.Mockito;
 
 import edu.harvard.iq.dataverse.authorization.users.GuestUser;
@@ -19,14 +19,14 @@ public class PermissionsWrapperTest {
 
     private PermissionsWrapper permissionWrapper;
 
-    @Before
+    @BeforeEach
     public void setUp() {
         this.permissionWrapper = new PermissionsWrapper();
         this.permissionWrapper.permissionService = mock(PermissionServiceBean.class);
         this.permissionWrapper.dvRequestService = mock(DataverseRequestServiceBean.class);
     }
 
-    @After
+    @AfterEach
     public void tearDown() {
         this.permissionWrapper = null;
     }
diff --git a/src/test/java/edu/harvard/iq/dataverse/PersistentIdentifierServiceBeanTest.java b/src/test/java/edu/harvard/iq/dataverse/PersistentIdentifierServiceBeanTest.java
index 2318ebc8f2f..542d00d0d78 100644
--- a/src/test/java/edu/harvard/iq/dataverse/PersistentIdentifierServiceBeanTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/PersistentIdentifierServiceBeanTest.java
@@ -10,22 +10,23 @@
 import edu.harvard.iq.dataverse.pidproviders.FakePidProviderServiceBean;
 import edu.harvard.iq.dataverse.pidproviders.PermaLinkPidProviderServiceBean;
 import edu.harvard.iq.dataverse.settings.SettingsServiceBean;
-import org.junit.Before;
-import org.junit.Test;
-import org.junit.runner.RunWith;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.extension.ExtendWith;
 import org.mockito.InjectMocks;
 import org.mockito.Mock;
 import org.mockito.Mockito;
 import org.mockito.MockitoAnnotations;
-import org.mockito.junit.MockitoJUnitRunner;
+import org.mockito.junit.jupiter.MockitoExtension;
 
-import static org.junit.Assert.*;
+
+import static org.junit.jupiter.api.Assertions.*;
 
 /**
  *
  * @author michael
  */
-@RunWith(MockitoJUnitRunner.class)
+@ExtendWith(MockitoExtension.class)
 public class PersistentIdentifierServiceBeanTest {
     
     @Mock
@@ -42,7 +43,7 @@ public class PersistentIdentifierServiceBeanTest {
     
     CommandContext ctxt;
     
-    @Before
+    @BeforeEach
     public void setup() {
         MockitoAnnotations.initMocks(this);
         ctxt = new TestCommandContext(){
diff --git a/src/test/java/edu/harvard/iq/dataverse/RoleAssignmentTest.java b/src/test/java/edu/harvard/iq/dataverse/RoleAssignmentTest.java
index e4b33b83930..f8138537cd7 100644
--- a/src/test/java/edu/harvard/iq/dataverse/RoleAssignmentTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/RoleAssignmentTest.java
@@ -1,18 +1,18 @@
 package edu.harvard.iq.dataverse;
 
-import org.junit.Test;
+import org.junit.jupiter.api.Test;
 
 import edu.harvard.iq.dataverse.authorization.DataverseRole;
 import edu.harvard.iq.dataverse.authorization.RoleAssignee;
 import edu.harvard.iq.dataverse.authorization.users.GuestUser;
 import edu.harvard.iq.dataverse.authorization.users.PrivateUrlUser;
 
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertTrue;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertTrue;
 import static org.junit.jupiter.api.Assertions.assertFalse;
 
-import org.junit.After;
-import org.junit.Before;
+import org.junit.jupiter.api.AfterEach;
+import org.junit.jupiter.api.BeforeEach;
 
 public class RoleAssignmentTest {
 
@@ -22,7 +22,7 @@ public class RoleAssignmentTest {
     private Dataset dataset;
     private String privateUrlToken;
 
-    @Before
+    @BeforeEach
     public void before() {
         this.dataverseRole = new DataverseRole();
         this.roleAssignee = GuestUser.get();
@@ -30,7 +30,7 @@ public void before() {
         this.privateUrlToken = "some-token";
     }
 
-    @After
+    @AfterEach
     public void after() {
         this.dataverseRole = null;
         this.roleAssignee = null;
diff --git a/src/test/java/edu/harvard/iq/dataverse/actionlogging/ActionLogRecordTest.java b/src/test/java/edu/harvard/iq/dataverse/actionlogging/ActionLogRecordTest.java
index 1726ea70114..6a965b17a16 100644
--- a/src/test/java/edu/harvard/iq/dataverse/actionlogging/ActionLogRecordTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/actionlogging/ActionLogRecordTest.java
@@ -1,13 +1,13 @@
 package edu.harvard.iq.dataverse.actionlogging;
 
-import static org.junit.Assert.assertFalse;
-import static org.junit.Assert.assertTrue;
+import static org.junit.jupiter.api.Assertions.*;
+import static org.junit.jupiter.api.Assertions.assertTrue;
 
 import java.util.Date;
 
-import org.junit.After;
-import org.junit.Before;
-import org.junit.Test;
+import org.junit.jupiter.api.AfterEach;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
 
 import edu.harvard.iq.dataverse.actionlogging.ActionLogRecord.ActionType;
 import edu.harvard.iq.dataverse.actionlogging.ActionLogRecord.Result;
@@ -16,7 +16,7 @@ public class ActionLogRecordTest {
 
     private ActionLogRecord referenceRecord;
 
-    @Before
+    @BeforeEach
     public void setUp() {
         this.referenceRecord = new ActionLogRecord(ActionType.Admin, "subType1");
         this.referenceRecord.setEndTime(new Date());
@@ -25,7 +25,7 @@ public void setUp() {
         this.referenceRecord.setInfo("info1");
     }
 
-    @After
+    @AfterEach
     public void tearDwon() {
         this.referenceRecord = null;
     }
diff --git a/src/test/java/edu/harvard/iq/dataverse/api/AbstractApiBeanTest.java b/src/test/java/edu/harvard/iq/dataverse/api/AbstractApiBeanTest.java
index 3e088c184ad..c67dfeeadfa 100644
--- a/src/test/java/edu/harvard/iq/dataverse/api/AbstractApiBeanTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/api/AbstractApiBeanTest.java
@@ -1,24 +1,22 @@
 package edu.harvard.iq.dataverse.api;
 
-import edu.harvard.iq.dataverse.util.MockResponse;
 import java.io.StringReader;
 import java.io.StringWriter;
 import java.util.HashMap;
 import java.util.Map;
 import java.util.logging.Logger;
-import javax.json.Json;
-import javax.json.JsonObject;
-import javax.json.JsonObjectBuilder;
-import javax.json.JsonReader;
-import javax.json.JsonWriter;
-import javax.json.JsonWriterFactory;
-import javax.json.stream.JsonGenerator;
-import javax.ws.rs.core.Response;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertFalse;
-import static org.junit.Assert.assertTrue;
-import org.junit.Before;
-import org.junit.Test;
+import jakarta.json.Json;
+import jakarta.json.JsonObject;
+import jakarta.json.JsonReader;
+import jakarta.json.JsonWriter;
+import jakarta.json.JsonWriterFactory;
+import jakarta.json.stream.JsonGenerator;
+import jakarta.ws.rs.core.Response;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.*;
+import static org.junit.jupiter.api.Assertions.assertTrue;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
 
 public class AbstractApiBeanTest {
 
@@ -26,7 +24,7 @@ public class AbstractApiBeanTest {
 
     AbstractApiBeanImpl sut;
 
-    @Before
+    @BeforeEach
     public void before() {
         sut = new AbstractApiBeanImpl();
     }
@@ -40,15 +38,15 @@ public void testParseBooleanOrDie_ok() throws Exception {
         assertFalse(sut.parseBooleanOrDie("0"));
         assertFalse(sut.parseBooleanOrDie("no"));
     }
-
-    @Test(expected = Exception.class)
-    public void testParseBooleanOrDie_invalid() throws Exception {
-        sut.parseBooleanOrDie("I'm not a boolean value!");
+    
+    @Test
+    void testParseBooleanOrDie_invalid() {
+        assertThrows(Exception.class, () -> sut.parseBooleanOrDie("I'm not a boolean value!"));
     }
 
     @Test
-    public void testFailIfNull_ok() throws Exception {
-        sut.failIfNull(sut, "");
+    void testFailIfNull_ok() {
+        assertDoesNotThrow(() -> sut.failIfNull(sut, ""));
     }
 
     @Test
diff --git a/src/test/java/edu/harvard/iq/dataverse/api/AccessIT.java b/src/test/java/edu/harvard/iq/dataverse/api/AccessIT.java
index d6aac80b435..606e8fa120e 100644
--- a/src/test/java/edu/harvard/iq/dataverse/api/AccessIT.java
+++ b/src/test/java/edu/harvard/iq/dataverse/api/AccessIT.java
@@ -5,30 +5,30 @@
  */
 package edu.harvard.iq.dataverse.api;
 
-import com.jayway.restassured.RestAssured;
-import static com.jayway.restassured.RestAssured.given;
-import com.jayway.restassured.path.json.JsonPath;
-import com.jayway.restassured.response.Response;
+import io.restassured.RestAssured;
+import static io.restassured.RestAssured.given;
+import io.restassured.path.json.JsonPath;
+import io.restassured.response.Response;
 import edu.harvard.iq.dataverse.DataFile;
-import static edu.harvard.iq.dataverse.api.UtilIT.API_TOKEN_HTTP_HEADER;
 import edu.harvard.iq.dataverse.util.FileUtil;
 import java.io.IOException;
 import java.util.zip.ZipInputStream;
-import org.junit.AfterClass;
-import org.junit.BeforeClass;
-import org.junit.Test;
+
+import org.hamcrest.MatcherAssert;
+import org.junit.jupiter.api.AfterAll;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.Test;
 import java.util.zip.ZipEntry;
 import java.io.ByteArrayOutputStream;
-import java.io.File;
 import java.io.InputStream;
-import java.nio.file.Path;
 import java.util.HashMap;
-import static javax.ws.rs.core.Response.Status.OK;
+import static jakarta.ws.rs.core.Response.Status.OK;
 import org.hamcrest.collection.IsMapContaining;
-import static junit.framework.Assert.assertEquals;
-import static org.junit.Assert.assertNotNull;
-import static org.junit.Assert.assertThat;
-import static org.junit.Assert.assertTrue;
+
+import static org.hamcrest.MatcherAssert.*;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertNotNull;
+import static org.junit.jupiter.api.Assertions.assertTrue;
 import static org.hamcrest.CoreMatchers.is;
 import static org.hamcrest.CoreMatchers.not;
 
@@ -82,7 +82,7 @@ public class AccessIT {
     private static String testFileFromZipUploadWithFoldersChecksum3 = "00433ccb20111f9d40f0e5ab6fa8396f";
 
     
-    @BeforeClass
+    @BeforeAll
     public static void setUp() throws InterruptedException {
         RestAssured.baseURI = UtilIT.getRestAssuredBaseUri();
         
@@ -124,7 +124,7 @@ public static void setUp() throws InterruptedException {
         tabFile2NameConvert = tabFile2Name.substring(0, tabFile2Name.indexOf(".dta")) + ".tab";
         String tab2PathToFile = "scripts/search/data/tabular/" + tabFile2Name;
 
-        assertTrue("Failed test if Ingest Lock exceeds max duration " + tabFile2Name, UtilIT.sleepForLock(datasetId.longValue(), "Ingest", apiToken, UtilIT.MAXIMUM_INGEST_LOCK_DURATION));
+        assertTrue(UtilIT.sleepForLock(datasetId.longValue(), "Ingest", apiToken, UtilIT.MAXIMUM_INGEST_LOCK_DURATION), "Failed test if Ingest Lock exceeds max duration " + tabFile2Name);
 
         Response tab2AddResponse = UtilIT.uploadFileViaNative(datasetId.toString(), tab2PathToFile, apiToken);
         tabFile2Id = JsonPath.from(tab2AddResponse.body().asString()).getInt("data.files[0].dataFile.id");
@@ -133,13 +133,13 @@ public static void setUp() throws InterruptedException {
         tabFile3NameRestrictedConvert = tabFile3NameRestricted.substring(0, tabFile3NameRestricted.indexOf(".dta")) + ".tab";
         String tab3PathToFile = "scripts/search/data/tabular/" + tabFile3NameRestricted;
 
-        assertTrue("Failed test if Ingest Lock exceeds max duration " + tabFile3NameRestricted , UtilIT.sleepForLock(datasetId.longValue(), "Ingest", apiToken, UtilIT.MAXIMUM_INGEST_LOCK_DURATION));     
+        assertTrue(UtilIT.sleepForLock(datasetId.longValue(), "Ingest", apiToken, UtilIT.MAXIMUM_INGEST_LOCK_DURATION), "Failed test if Ingest Lock exceeds max duration " + tabFile3NameRestricted);
         
         Response tab3AddResponse = UtilIT.uploadFileViaNative(datasetId.toString(), tab3PathToFile, apiToken);
 
         tabFile3IdRestricted = JsonPath.from(tab3AddResponse.body().asString()).getInt("data.files[0].dataFile.id");
         
-        assertTrue("Failed test if Ingest Lock exceeds max duration " + tabFile3NameRestricted , UtilIT.sleepForLock(datasetId.longValue(), "Ingest", apiToken, UtilIT.MAXIMUM_INGEST_LOCK_DURATION));
+        assertTrue(UtilIT.sleepForLock(datasetId.longValue(), "Ingest", apiToken, UtilIT.MAXIMUM_INGEST_LOCK_DURATION), "Failed test if Ingest Lock exceeds max duration " + tabFile3NameRestricted);
         
         Response restrictResponse = UtilIT.restrictFile(tabFile3IdRestricted.toString(), true, apiToken);
         restrictResponse.prettyPrint();
@@ -158,11 +158,11 @@ public static void setUp() throws InterruptedException {
         String tab4PathToFile = "scripts/search/data/tabular/" + tabFile4NameUnpublished;
         Response tab4AddResponse = UtilIT.uploadFileViaNative(datasetId.toString(), tab4PathToFile, apiToken);
         tabFile4IdUnpublished = JsonPath.from(tab4AddResponse.body().asString()).getInt("data.files[0].dataFile.id");
-        assertTrue("Failed test if Ingest Lock exceeds max duration " + tabFile2Name, UtilIT.sleepForLock(datasetId.longValue(), "Ingest", apiToken, UtilIT.MAXIMUM_INGEST_LOCK_DURATION));
+        assertTrue(UtilIT.sleepForLock(datasetId.longValue(), "Ingest", apiToken, UtilIT.MAXIMUM_INGEST_LOCK_DURATION), "Failed test if Ingest Lock exceeds max duration " + tabFile2Name);
                         
     }
     
-    @AfterClass
+    @AfterAll
     public static void tearDown() {   
 
         Response publishDataset = UtilIT.publishDatasetViaNativeApi(datasetId, "major", apiToken);
@@ -484,7 +484,7 @@ public void testRequestAccess() throws InterruptedException {
         Response tab3AddResponse = UtilIT.uploadFileViaNative(datasetIdNew.toString(), tab3PathToFile, apiToken);
         Integer tabFile3IdRestrictedNew = JsonPath.from(tab3AddResponse.body().asString()).getInt("data.files[0].dataFile.id");
 
-        assertTrue("Failed test if Ingest Lock exceeds max duration " + tab3PathToFile , UtilIT.sleepForLock(datasetIdNew.longValue(), "Ingest", apiToken, UtilIT.MAXIMUM_INGEST_LOCK_DURATION));
+        assertTrue(UtilIT.sleepForLock(datasetIdNew.longValue(), "Ingest", apiToken, UtilIT.MAXIMUM_INGEST_LOCK_DURATION), "Failed test if Ingest Lock exceeds max duration " + tab3PathToFile);
         
         Response restrictResponse = UtilIT.restrictFile(tabFile3IdRestrictedNew.toString(), true, apiToken);
         restrictResponse.prettyPrint();
diff --git a/src/test/java/edu/harvard/iq/dataverse/api/AdminIT.java b/src/test/java/edu/harvard/iq/dataverse/api/AdminIT.java
index 2ba06314ddb..a5a4924ad77 100644
--- a/src/test/java/edu/harvard/iq/dataverse/api/AdminIT.java
+++ b/src/test/java/edu/harvard/iq/dataverse/api/AdminIT.java
@@ -1,8 +1,8 @@
 package edu.harvard.iq.dataverse.api;
 
-import com.jayway.restassured.RestAssured;
-import com.jayway.restassured.path.json.JsonPath;
-import com.jayway.restassured.response.Response;
+import io.restassured.RestAssured;
+import io.restassured.path.json.JsonPath;
+import io.restassured.response.Response;
 import edu.harvard.iq.dataverse.DataFile;
 import edu.harvard.iq.dataverse.authorization.providers.builtin.BuiltinAuthenticationProvider;
 import edu.harvard.iq.dataverse.authorization.providers.oauth2.impl.GitHubOAuth2AP;
@@ -15,24 +15,25 @@
 import java.util.ArrayList;
 import java.util.HashMap;
 import java.util.List;
-import static javax.ws.rs.core.Response.Status.FORBIDDEN;
-import static javax.ws.rs.core.Response.Status.BAD_REQUEST;
-import org.junit.Test;
-import org.junit.BeforeClass;
+import static jakarta.ws.rs.core.Response.Status.FORBIDDEN;
+import static jakarta.ws.rs.core.Response.Status.BAD_REQUEST;
+
+import org.junit.jupiter.api.Disabled;
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.BeforeAll;
 
 import java.util.Map;
 import java.util.UUID;
 import java.util.logging.Logger;
 
-import static javax.ws.rs.core.Response.Status.CREATED;
-import static javax.ws.rs.core.Response.Status.INTERNAL_SERVER_ERROR;
-import static javax.ws.rs.core.Response.Status.OK;
-import static javax.ws.rs.core.Response.Status.UNAUTHORIZED;
-import static junit.framework.Assert.assertEquals;
+import static jakarta.ws.rs.core.Response.Status.CREATED;
+import static jakarta.ws.rs.core.Response.Status.INTERNAL_SERVER_ERROR;
+import static jakarta.ws.rs.core.Response.Status.OK;
+import static jakarta.ws.rs.core.Response.Status.UNAUTHORIZED;
+import static org.junit.jupiter.api.Assertions.assertEquals;
 import static org.hamcrest.CoreMatchers.equalTo;
 import static org.hamcrest.CoreMatchers.notNullValue;
-import static org.junit.Assert.assertTrue;
-import org.junit.Ignore;
+import static org.junit.jupiter.api.Assertions.assertTrue;
 
 public class AdminIT {
 
@@ -40,7 +41,7 @@ public class AdminIT {
 
     private final String testNonSuperuserApiToken = createTestNonSuperuserApiToken();
 
-    @BeforeClass
+    @BeforeAll
     public static void setUp() {
         RestAssured.baseURI = UtilIT.getRestAssuredBaseUri();
     }
@@ -604,7 +605,7 @@ public void testRecalculateDataFileHash() {
         String superuserUsername = UtilIT.getUsernameFromResponse(createSuperuser);
         UtilIT.makeSuperUser(superuserUsername);
 
-        assertTrue("Failed test if Ingest Lock exceeds max duration " + origFileId, UtilIT.sleepForLock(datasetId.longValue(), "Ingest", superuserApiToken, UtilIT.MAXIMUM_INGEST_LOCK_DURATION));
+        assertTrue(UtilIT.sleepForLock(datasetId.longValue(), "Ingest", superuserApiToken, UtilIT.MAXIMUM_INGEST_LOCK_DURATION), "Failed test if Ingest Lock exceeds max duration " + origFileId);
 
         //Bad file id         
         Response computeDataFileHashResponse = UtilIT.computeDataFileHashValue("BadFileId", DataFile.ChecksumType.MD5.toString(), superuserApiToken);
@@ -669,7 +670,7 @@ public void testRecalculateDataFileHash() {
     }
     
     @Test
-    @Ignore
+    @Disabled
     public void testMigrateHDLToDOI() {
         /*
         This test is set to ignore because it requires a setup that will
@@ -733,6 +734,13 @@ public void testMigrateHDLToDOI() {
                 .statusCode(OK.getStatusCode());
     }
 
+    /**
+     * Disabled because once there are new fields in the database that Solr
+     * doesn't know about, dataset creation could be prevented, or at least
+     * subsequent search operations could fail because the dataset can't be
+     * indexed.
+     */
+    @Disabled
     @Test
     public void testLoadMetadataBlock_NoErrorPath() {
         Response createUser = UtilIT.createRandomUser();
@@ -777,6 +785,13 @@ public void testLoadMetadataBlock_NoErrorPath() {
         assertEquals(244, (int) statistics.get("Controlled Vocabulary"));
     }
 
+    /**
+     * Disabled because once there are new fields in the database that Solr
+     * doesn't know about, dataset creation could be prevented, or at least
+     * subsequent search operations could fail because the dataset can't be
+     * indexed.
+     */
+    @Disabled
     @Test
     public void testLoadMetadataBlock_ErrorHandling() {
         Response createUser = UtilIT.createRandomUser();
diff --git a/src/test/java/edu/harvard/iq/dataverse/api/AuxiliaryFilesIT.java b/src/test/java/edu/harvard/iq/dataverse/api/AuxiliaryFilesIT.java
index 0e404f6ba97..754350e93db 100644
--- a/src/test/java/edu/harvard/iq/dataverse/api/AuxiliaryFilesIT.java
+++ b/src/test/java/edu/harvard/iq/dataverse/api/AuxiliaryFilesIT.java
@@ -1,26 +1,26 @@
 package edu.harvard.iq.dataverse.api;
 
-import com.jayway.restassured.RestAssured;
-import com.jayway.restassured.path.json.JsonPath;
-import com.jayway.restassured.response.Response;
+import io.restassured.RestAssured;
+import io.restassured.path.json.JsonPath;
+import io.restassured.response.Response;
 import java.io.File;
 import java.io.IOException;
 import java.nio.file.Path;
 import java.nio.file.Paths;
-import static javax.ws.rs.core.Response.Status.CONFLICT;
-import static javax.ws.rs.core.Response.Status.CREATED;
-import static javax.ws.rs.core.Response.Status.FORBIDDEN;
-import static javax.ws.rs.core.Response.Status.NOT_FOUND;
-import static javax.ws.rs.core.Response.Status.OK;
+import static jakarta.ws.rs.core.Response.Status.CONFLICT;
+import static jakarta.ws.rs.core.Response.Status.CREATED;
+import static jakarta.ws.rs.core.Response.Status.FORBIDDEN;
+import static jakarta.ws.rs.core.Response.Status.NOT_FOUND;
+import static jakarta.ws.rs.core.Response.Status.OK;
 import static org.hamcrest.CoreMatchers.equalTo;
-import org.junit.Assert;
-import static org.junit.Assert.assertTrue;
-import org.junit.BeforeClass;
-import org.junit.Test;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertTrue;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.Test;
 
 public class AuxiliaryFilesIT {
 
-    @BeforeClass
+    @BeforeAll
     public static void setUp() {
         RestAssured.baseURI = UtilIT.getRestAssuredBaseUri();
     }
@@ -62,7 +62,7 @@ public void testUploadAuxFiles() throws IOException {
 
         Long fileId = JsonPath.from(uploadFile.body().asString()).getLong("data.files[0].dataFile.id");
 
-        assertTrue("Failed test if Ingest Lock exceeds max duration " + pathToDataFile, UtilIT.sleepForLock(datasetId.longValue(), "Ingest", apiToken, UtilIT.MAXIMUM_INGEST_LOCK_DURATION));
+        assertTrue(UtilIT.sleepForLock(datasetId.longValue(), "Ingest", apiToken, UtilIT.MAXIMUM_INGEST_LOCK_DURATION), "Failed test if Ingest Lock exceeds max duration " + pathToDataFile);
 
         Response restrictFile = UtilIT.restrictFile(fileId.toString(), true, apiToken);
         restrictFile.prettyPrint();
@@ -243,29 +243,29 @@ public void testUploadAuxFiles() throws IOException {
         // Download JSON aux file.
         Response downloadAuxFileJson = UtilIT.downloadAuxFile(fileId, formatTagJson, formatVersionJson, apiToken);
         downloadAuxFileJson.then().assertThat().statusCode(OK.getStatusCode());
-        Assert.assertEquals("attachment; filename=\"data.tab.dpJson_0.1.json\"", downloadAuxFileJson.header("Content-disposition"));
+        assertEquals("attachment; filename=\"data.tab.dpJson_0.1.json\"", downloadAuxFileJson.header("Content-disposition"));
 
         // Download XML aux file.
         Response downloadAuxFileXml = UtilIT.downloadAuxFile(fileId, formatTagXml, formatVersionXml, apiToken);
         downloadAuxFileXml.then().assertThat().statusCode(OK.getStatusCode());
-        Assert.assertEquals("attachment; filename=\"data.tab.dpXml_0.1.xml\"", downloadAuxFileXml.header("Content-disposition"));
+        assertEquals("attachment; filename=\"data.tab.dpXml_0.1.xml\"", downloadAuxFileXml.header("Content-disposition"));
 
         // Download PDF aux file.
         Response downloadAuxFilePdf = UtilIT.downloadAuxFile(fileId, formatTagPdf, formatVersionPdf, apiToken);
         downloadAuxFilePdf.then().assertThat().statusCode(OK.getStatusCode());
-        Assert.assertEquals("attachment; filename=\"data.tab.dpPdf_0.1.pdf\"", downloadAuxFilePdf.header("Content-disposition"));
+        assertEquals("attachment; filename=\"data.tab.dpPdf_0.1.pdf\"", downloadAuxFilePdf.header("Content-disposition"));
 
         // Download Markdown aux file.
         Response downloadAuxFileMd = UtilIT.downloadAuxFile(fileId, formatTagMd, formatVersionMd, apiToken);
         downloadAuxFileMd.then().assertThat().statusCode(OK.getStatusCode());
         // No file extenstion here because Tika's getDefaultMimeTypes doesn't include "text/markdown".
         // Note: browsers seem to add ".bin" ("myfile.bin") rather than no extension ("myfile").
-        Assert.assertEquals("attachment; filename=\"data.tab.README_0.1\"", downloadAuxFileMd.header("Content-disposition"));
+        assertEquals("attachment; filename=\"data.tab.README_0.1\"", downloadAuxFileMd.header("Content-disposition"));
 
         // Download Markdown aux file with no MIME type given
         Response downloadAuxFileNoMime1 = UtilIT.downloadAuxFile(fileId, formatTagNoMimeType1, formatVersionNoMimeType1, apiToken);
         downloadAuxFileNoMime1.then().assertThat().statusCode(OK.getStatusCode());
-        Assert.assertEquals("attachment; filename=\"data.tab.noMimeType1_0.1.txt\"", downloadAuxFileNoMime1.header("Content-disposition"));
+        assertEquals("attachment; filename=\"data.tab.noMimeType1_0.1.txt\"", downloadAuxFileNoMime1.header("Content-disposition"));
 
         Response createUserNoPrivs = UtilIT.createRandomUser();
         createUserNoPrivs.then().assertThat().statusCode(OK.getStatusCode());
diff --git a/src/test/java/edu/harvard/iq/dataverse/api/BagIT.java b/src/test/java/edu/harvard/iq/dataverse/api/BagIT.java
index 4ac76ac846d..e7210bc45a9 100644
--- a/src/test/java/edu/harvard/iq/dataverse/api/BagIT.java
+++ b/src/test/java/edu/harvard/iq/dataverse/api/BagIT.java
@@ -1,18 +1,18 @@
 package edu.harvard.iq.dataverse.api;
 
-import com.jayway.restassured.RestAssured;
-import com.jayway.restassured.response.Response;
+import io.restassured.RestAssured;
+import io.restassured.response.Response;
 import edu.harvard.iq.dataverse.engine.command.impl.LocalSubmitToArchiveCommand;
 import edu.harvard.iq.dataverse.settings.SettingsServiceBean;
-import static javax.ws.rs.core.Response.Status.CREATED;
-import static javax.ws.rs.core.Response.Status.OK;
-import org.junit.AfterClass;
-import org.junit.BeforeClass;
-import org.junit.Test;
+import static jakarta.ws.rs.core.Response.Status.CREATED;
+import static jakarta.ws.rs.core.Response.Status.OK;
+import org.junit.jupiter.api.AfterAll;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.Test;
 
 public class BagIT {
 
-    @BeforeClass
+    @BeforeAll
     public static void setUpClass() {
 
         RestAssured.baseURI = UtilIT.getRestAssuredBaseUri();
@@ -65,7 +65,7 @@ public void testBagItExport() {
 
     }
 
-    @AfterClass
+    @AfterAll
     public static void tearDownClass() {
 
         // Not checking if delete happened. Hopefully, it did.
diff --git a/src/test/java/edu/harvard/iq/dataverse/api/BatchImportIT.java b/src/test/java/edu/harvard/iq/dataverse/api/BatchImportIT.java
index 89ad79817d8..c72fe19e494 100644
--- a/src/test/java/edu/harvard/iq/dataverse/api/BatchImportIT.java
+++ b/src/test/java/edu/harvard/iq/dataverse/api/BatchImportIT.java
@@ -1,15 +1,15 @@
 package edu.harvard.iq.dataverse.api;
 
-import com.jayway.restassured.RestAssured;
-import com.jayway.restassured.path.json.JsonPath;
+import io.restassured.RestAssured;
+import io.restassured.path.json.JsonPath;
 import java.io.File;
-import com.jayway.restassured.response.Response;
+import io.restassured.response.Response;
 import java.util.logging.Logger;
-import org.junit.BeforeClass;
-import org.junit.Test;
-import static javax.ws.rs.core.Response.Status.ACCEPTED;
-import static javax.ws.rs.core.Response.Status.OK;
-import static javax.ws.rs.core.Response.Status.CREATED;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.Test;
+import static jakarta.ws.rs.core.Response.Status.ACCEPTED;
+import static jakarta.ws.rs.core.Response.Status.OK;
+import static jakarta.ws.rs.core.Response.Status.CREATED;
 import org.hamcrest.CoreMatchers;
 
 public class BatchImportIT {
@@ -21,7 +21,7 @@ public class BatchImportIT {
     public BatchImportIT() {
     }
 
-    @BeforeClass
+    @BeforeAll
     public static void setUpClass() {
         RestAssured.baseURI = UtilIT.getRestAssuredBaseUri();
     }
diff --git a/src/test/java/edu/harvard/iq/dataverse/api/BuiltinUsersIT.java b/src/test/java/edu/harvard/iq/dataverse/api/BuiltinUsersIT.java
index 301cd24b35b..af938cbebe1 100644
--- a/src/test/java/edu/harvard/iq/dataverse/api/BuiltinUsersIT.java
+++ b/src/test/java/edu/harvard/iq/dataverse/api/BuiltinUsersIT.java
@@ -1,10 +1,10 @@
 package edu.harvard.iq.dataverse.api;
 
-import com.jayway.restassured.RestAssured;
-import static com.jayway.restassured.RestAssured.given;
-import com.jayway.restassured.http.ContentType;
-import com.jayway.restassured.path.json.JsonPath;
-import com.jayway.restassured.response.Response;
+import io.restassured.RestAssured;
+import static io.restassured.RestAssured.given;
+import io.restassured.http.ContentType;
+import io.restassured.path.json.JsonPath;
+import io.restassured.response.Response;
 import edu.harvard.iq.dataverse.api.auth.ApiKeyAuthMechanism;
 import edu.harvard.iq.dataverse.settings.SettingsServiceBean;
 import java.util.AbstractMap;
@@ -15,18 +15,17 @@
 import java.util.logging.Logger;
 import java.util.stream.Collectors;
 import java.util.stream.Stream;
-import javax.json.Json;
-import javax.json.JsonObjectBuilder;
-import static javax.ws.rs.core.Response.Status.OK;
-import static javax.ws.rs.core.Response.Status.FORBIDDEN;
-import static javax.ws.rs.core.Response.Status.UNAUTHORIZED;
-import static junit.framework.Assert.assertEquals;
+import jakarta.json.Json;
+import jakarta.json.JsonObjectBuilder;
+import static jakarta.ws.rs.core.Response.Status.OK;
+import static jakarta.ws.rs.core.Response.Status.FORBIDDEN;
+import static jakarta.ws.rs.core.Response.Status.UNAUTHORIZED;
+import static org.junit.jupiter.api.Assertions.assertEquals;
 import static org.hamcrest.CoreMatchers.equalTo;
 import static org.hamcrest.Matchers.startsWith;
-import static org.junit.Assert.assertTrue;
-import org.junit.BeforeClass;
-import org.junit.Ignore;
-import org.junit.Test;
+import static org.junit.jupiter.api.Assertions.assertTrue;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.Test;
 
 public class BuiltinUsersIT {
 
@@ -37,7 +36,7 @@ public class BuiltinUsersIT {
     private static final String usernameKey = "userName";
     private static final String emailKey = "email";
 
-    @BeforeClass
+    @BeforeAll
     public static void setUp() {
         RestAssured.baseURI = UtilIT.getRestAssuredBaseUri();
 
diff --git a/src/test/java/edu/harvard/iq/dataverse/api/ConfirmEmailIT.java b/src/test/java/edu/harvard/iq/dataverse/api/ConfirmEmailIT.java
index e00dba2263b..0fef3d7166e 100644
--- a/src/test/java/edu/harvard/iq/dataverse/api/ConfirmEmailIT.java
+++ b/src/test/java/edu/harvard/iq/dataverse/api/ConfirmEmailIT.java
@@ -1,13 +1,13 @@
 package edu.harvard.iq.dataverse.api;
 
-import com.jayway.restassured.RestAssured;
-import static com.jayway.restassured.RestAssured.given;
-import com.jayway.restassured.path.json.JsonPath;
-import com.jayway.restassured.response.Response;
+import io.restassured.RestAssured;
+import static io.restassured.RestAssured.given;
+import io.restassured.path.json.JsonPath;
+import io.restassured.response.Response;
 import java.util.logging.Logger;
-import static junit.framework.Assert.assertEquals;
-import org.junit.BeforeClass;
-import org.junit.Test;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.Test;
+
 import static org.hamcrest.CoreMatchers.nullValue;
 import static org.hamcrest.Matchers.startsWith;
 
@@ -18,7 +18,7 @@ public class ConfirmEmailIT {
 
     private static final Logger logger = Logger.getLogger(ConfirmEmailIT.class.getCanonicalName());
 
-    @BeforeClass
+    @BeforeAll
     public static void setUp() {
         RestAssured.baseURI = UtilIT.getRestAssuredBaseUri();
     }
diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DataCiteIT.java b/src/test/java/edu/harvard/iq/dataverse/api/DataCiteIT.java
index 86c3eed4297..bb4c64dedcf 100644
--- a/src/test/java/edu/harvard/iq/dataverse/api/DataCiteIT.java
+++ b/src/test/java/edu/harvard/iq/dataverse/api/DataCiteIT.java
@@ -1,10 +1,10 @@
 package edu.harvard.iq.dataverse.api;
 
-import com.jayway.restassured.path.json.JsonPath;
-import com.jayway.restassured.response.Response;
-import static junit.framework.Assert.assertEquals;
+import io.restassured.path.json.JsonPath;
+import io.restassured.response.Response;
+import static org.junit.jupiter.api.Assertions.assertEquals;
 import static org.hamcrest.CoreMatchers.equalTo;
-import org.junit.Test;
+import org.junit.jupiter.api.Test;
 
 /**
  * These tests will only work if you are using "DataCite" rather than "EZID" for
diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DataRetrieverApiIT.java b/src/test/java/edu/harvard/iq/dataverse/api/DataRetrieverApiIT.java
index 69d5db08744..facb3f7c784 100644
--- a/src/test/java/edu/harvard/iq/dataverse/api/DataRetrieverApiIT.java
+++ b/src/test/java/edu/harvard/iq/dataverse/api/DataRetrieverApiIT.java
@@ -1,21 +1,21 @@
 package edu.harvard.iq.dataverse.api;
 
-import com.jayway.restassured.RestAssured;
-import com.jayway.restassured.response.Response;
+import io.restassured.RestAssured;
+import io.restassured.response.Response;
 import edu.harvard.iq.dataverse.api.auth.ApiKeyAuthMechanism;
-import org.junit.BeforeClass;
-import org.junit.Test;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.Test;
 
 import java.util.ArrayList;
 
-import static javax.ws.rs.core.Response.Status.OK;
-import static javax.ws.rs.core.Response.Status.UNAUTHORIZED;
+import static jakarta.ws.rs.core.Response.Status.OK;
+import static jakarta.ws.rs.core.Response.Status.UNAUTHORIZED;
 import static org.hamcrest.CoreMatchers.equalTo;
 import static org.junit.jupiter.api.Assertions.assertEquals;
 
 public class DataRetrieverApiIT {
 
-    @BeforeClass
+    @BeforeAll
     public static void setUpClass() {
         RestAssured.baseURI = UtilIT.getRestAssuredBaseUri();
     }
diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DatasetFieldServiceApiTest.java b/src/test/java/edu/harvard/iq/dataverse/api/DatasetFieldServiceApiTest.java
index 559e5a7dfba..ca99960f240 100644
--- a/src/test/java/edu/harvard/iq/dataverse/api/DatasetFieldServiceApiTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/api/DatasetFieldServiceApiTest.java
@@ -1,12 +1,12 @@
 package edu.harvard.iq.dataverse.api;
 
 import edu.harvard.iq.dataverse.util.BundleUtil;
-import org.junit.Test;
+import org.junit.jupiter.api.Test;
 
 import java.util.ArrayList;
 import java.util.List;
 
-import static org.junit.Assert.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertEquals;
 
 public class DatasetFieldServiceApiTest {
 
diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java
index 866524a2605..b353b4488d0 100644
--- a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java
+++ b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java
@@ -1,33 +1,34 @@
 package edu.harvard.iq.dataverse.api;
 
-import com.jayway.restassured.RestAssured;
+import io.restassured.RestAssured;
 
-import static com.jayway.restassured.RestAssured.given;
+import static io.restassured.RestAssured.given;
 
-import com.jayway.restassured.http.ContentType;
-import com.jayway.restassured.response.Response;
+import io.restassured.path.json.JsonPath;
+import io.restassured.http.ContentType;
+import io.restassured.response.Response;
 
 import java.util.logging.Logger;
 
-import org.junit.BeforeClass;
-import org.junit.Test;
+import org.junit.jupiter.api.AfterAll;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.Test;
 import org.skyscreamer.jsonassert.JSONAssert;
-import org.junit.Ignore;
-import com.jayway.restassured.path.json.JsonPath;
+import org.junit.jupiter.api.Disabled;
 
 import java.util.List;
 import java.util.Map;
-import javax.json.JsonObject;
-
-import static javax.ws.rs.core.Response.Status.CREATED;
-import static javax.ws.rs.core.Response.Status.FORBIDDEN;
-import static javax.ws.rs.core.Response.Status.OK;
-import static javax.ws.rs.core.Response.Status.UNAUTHORIZED;
-import static javax.ws.rs.core.Response.Status.NOT_FOUND;
-import static javax.ws.rs.core.Response.Status.BAD_REQUEST;
-import static javax.ws.rs.core.Response.Status.METHOD_NOT_ALLOWED;
-import static javax.ws.rs.core.Response.Status.CONFLICT;
-import static javax.ws.rs.core.Response.Status.NO_CONTENT;
+import jakarta.json.JsonObject;
+
+import static jakarta.ws.rs.core.Response.Status.CREATED;
+import static jakarta.ws.rs.core.Response.Status.FORBIDDEN;
+import static jakarta.ws.rs.core.Response.Status.OK;
+import static jakarta.ws.rs.core.Response.Status.UNAUTHORIZED;
+import static jakarta.ws.rs.core.Response.Status.NOT_FOUND;
+import static jakarta.ws.rs.core.Response.Status.BAD_REQUEST;
+import static jakarta.ws.rs.core.Response.Status.METHOD_NOT_ALLOWED;
+import static jakarta.ws.rs.core.Response.Status.CONFLICT;
+import static jakarta.ws.rs.core.Response.Status.NO_CONTENT;
 
 import edu.harvard.iq.dataverse.DataFile;
 
@@ -42,11 +43,11 @@
 import org.apache.commons.lang3.StringUtils;
 import org.apache.commons.lang3.exception.ExceptionUtils;
 
-import com.jayway.restassured.parsing.Parser;
+import io.restassured.parsing.Parser;
 
-import static com.jayway.restassured.path.json.JsonPath.with;
+import static io.restassured.path.json.JsonPath.with;
 
-import com.jayway.restassured.path.xml.XmlPath;
+import io.restassured.path.xml.XmlPath;
 
 import static edu.harvard.iq.dataverse.api.UtilIT.equalToCI;
 
@@ -67,15 +68,15 @@
 import java.nio.file.Files;
 import java.util.ArrayList;
 import java.util.HashMap;
-import javax.json.Json;
-import javax.json.JsonArray;
-import javax.json.JsonObjectBuilder;
-import javax.ws.rs.core.Response.Status;
+import jakarta.json.Json;
+import jakarta.json.JsonArray;
+import jakarta.json.JsonObjectBuilder;
+import jakarta.ws.rs.core.Response.Status;
 import javax.xml.stream.XMLInputFactory;
 import javax.xml.stream.XMLStreamException;
 import javax.xml.stream.XMLStreamReader;
 
-import static org.junit.Assert.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertEquals;
 
 import org.hamcrest.CoreMatchers;
 
@@ -86,14 +87,10 @@
 import static org.hamcrest.CoreMatchers.nullValue;
 import static org.hamcrest.Matchers.contains;
 
-import org.junit.AfterClass;
-import org.junit.Assert;
-
-import static org.junit.Assert.assertNotEquals;
-import static org.junit.Assert.assertNotNull;
-import static org.junit.Assert.assertTrue;
-import static org.junit.Assert.fail;
-import static org.junit.Assert.assertFalse;
+import static org.junit.jupiter.api.Assertions.assertNotEquals;
+import static org.junit.jupiter.api.Assertions.assertNotNull;
+import static org.junit.jupiter.api.Assertions.assertTrue;
+import static org.junit.jupiter.api.Assertions.fail;
 
 
 public class DatasetsIT {
@@ -102,7 +99,7 @@ public class DatasetsIT {
     
     
 
-    @BeforeClass
+    @BeforeAll
     public static void setUpClass() {
         
         
@@ -132,7 +129,7 @@ public static void setUpClass() {
          */
     }
 
-    @AfterClass
+    @AfterAll
     public static void afterClass() {
 
         Response removeIdentifierGenerationStyle = UtilIT.deleteSetting(SettingsServiceBean.Key.IdentifierGenerationStyle);
@@ -670,9 +667,7 @@ public void testExport() {
         exportDatasetAsDdi.then().assertThat()
                 .statusCode(OK.getStatusCode());
 
-        // This is now returning [] instead of sammi@sample.com. Not sure why.
-        // :ExcludeEmailFromExport is absent so the email should be shown.
-        assertEquals("[]", XmlPath.from(exportDatasetAsDdi.body().asString()).getString("codeBook.stdyDscr.stdyInfo.contact.@email"));
+        assertEquals(null, XmlPath.from(exportDatasetAsDdi.body().asString()).getString("codeBook.stdyDscr.stdyInfo.contact.@email"));
         assertEquals(datasetPersistentId, XmlPath.from(exportDatasetAsDdi.body().asString()).getString("codeBook.docDscr.citation.titlStmt.IDNo"));
 
         Response reexportAllFormats = UtilIT.reexportDatasetAllFormats(datasetPersistentId);
@@ -762,7 +757,7 @@ public void testExcludeEmail() {
 
         assertEquals("Dataverse, Admin", XmlPath.from(exportDatasetAsDdi.body().asString()).getString("codeBook.stdyDscr.citation.distStmt.contact"));
         // no "sammi@sample.com" to be found https://github.com/IQSS/dataverse/issues/3443
-        assertEquals("[]", XmlPath.from(exportDatasetAsDdi.body().asString()).getString("codeBook.stdyDscr.citation.distStmt.contact.@email"));
+        assertEquals(null, XmlPath.from(exportDatasetAsDdi.body().asString()).getString("codeBook.stdyDscr.citation.distStmt.contact.@email"));
         assertEquals("Sample Datasets, inc.", XmlPath.from(exportDatasetAsDdi.body().asString()).getString("codeBook.stdyDscr.citation.distStmt.contact.@affiliation"));
         assertEquals(datasetPersistentId, XmlPath.from(exportDatasetAsDdi.body().asString()).getString("codeBook.docDscr.citation.titlStmt.IDNo"));
 
@@ -1855,7 +1850,7 @@ public void testCreateDeleteDatasetLink() {
     }
     
     @Test
-    @Ignore
+    @Disabled
     public void testApiErrors() {
 
         /*
@@ -1973,7 +1968,7 @@ public void testDatasetLocksApi() {
                 break;
             } 
         }
-        assertTrue("Lock missing from the output of /api/datasets/locks", lockListedCorrectly);        
+        assertTrue(lockListedCorrectly, "Lock missing from the output of /api/datasets/locks");
         
         // Try the same, but with an api token of a random, non-super user 
         // (this should get rejected):
@@ -2003,7 +1998,7 @@ public void testDatasetLocksApi() {
                 break;
             } 
         }
-        assertTrue("Lock missing from the output of /api/datasets/locks?type=Ingest", lockListedCorrectly);        
+        assertTrue(lockListedCorrectly, "Lock missing from the output of /api/datasets/locks?type=Ingest");
 
         
         // Try to list locks of an invalid type:
@@ -2064,7 +2059,7 @@ public void testDatasetLocksApi() {
      * This test requires the root dataverse to be published to pass.
      */
     @Test
-    @Ignore
+    @Disabled
     public void testUpdatePIDMetadataAPI() {
 
         Response createUser = UtilIT.createRandomUser();
@@ -2317,7 +2312,7 @@ public void testUnrestrictedFileExportDdi() throws IOException {
 
         String fileId = JsonPath.from(uploadFile.body().asString()).getString("data.files[0].dataFile.id");
 
-        assertTrue("Failed test if Ingest Lock exceeds max duration " + pathToFile, UtilIT.sleepForLock(datasetId.longValue(), "Ingest", authorApiToken, UtilIT.MAXIMUM_INGEST_LOCK_DURATION));
+        assertTrue(UtilIT.sleepForLock(datasetId.longValue(), "Ingest", authorApiToken, UtilIT.MAXIMUM_INGEST_LOCK_DURATION), "Failed test if Ingest Lock exceeds max duration " + pathToFile);
 
         Response publishDataverse = UtilIT.publishDataverseViaNativeApi(dataverseAlias, authorApiToken);
         publishDataverse.then().assertThat().statusCode(OK.getStatusCode());
@@ -2389,7 +2384,7 @@ public void testRestrictFileExportDdi() throws IOException {
 
         String fileId = JsonPath.from(uploadFile.body().asString()).getString("data.files[0].dataFile.id");
 
-        assertTrue("Failed test if Ingest Lock exceeds max duration " + pathToFile, UtilIT.sleepForLock(datasetId.longValue(), "Ingest", authorApiToken, UtilIT.MAXIMUM_INGEST_LOCK_DURATION));
+        assertTrue(UtilIT.sleepForLock(datasetId.longValue(), "Ingest", authorApiToken, UtilIT.MAXIMUM_INGEST_LOCK_DURATION), "Failed test if Ingest Lock exceeds max duration " + pathToFile);
 
         Response restrictFile = UtilIT.restrictFile(fileId, true, authorApiToken);
         restrictFile.prettyPrint();
@@ -2410,7 +2405,7 @@ public void testRestrictFileExportDdi() throws IOException {
 
         // Here we are asserting that dataDscr is empty. TODO: Do this in REST Assured.
         String dataDscrForGuest = XmlPath.from(exportByGuest.asString()).getString("codeBook.dataDscr");
-        Assert.assertEquals("", dataDscrForGuest);
+        assertEquals("", dataDscrForGuest);
 
         // Author export (has access)
         Response exportByAuthor = UtilIT.exportDataset(datasetPid, "ddi", authorApiToken);
@@ -2421,7 +2416,7 @@ public void testRestrictFileExportDdi() throws IOException {
 
         // Here we are asserting that dataDscr is empty. TODO: Do this in REST Assured.
         String dataDscrForAuthor = XmlPath.from(exportByAuthor.asString()).getString("codeBook.dataDscr");
-        Assert.assertEquals("", dataDscrForAuthor);
+        assertEquals("", dataDscrForAuthor);
 
         // Now we are testing file-level retrieval.
         // The author has access to a restricted file and gets all the metadata.
@@ -2807,7 +2802,7 @@ public void testCuratePublishedDatasetVersionCommand() throws IOException {
 
         // Give file time to ingest
         
-        assertTrue("Failed test if Ingest Lock exceeds max duration " + pathToFileThatGoesThroughIngest , UtilIT.sleepForLock(datasetId.longValue(), "Ingest", apiToken, UtilIT.MAXIMUM_INGEST_LOCK_DURATION));
+        assertTrue(UtilIT.sleepForLock(datasetId.longValue(), "Ingest", apiToken, UtilIT.MAXIMUM_INGEST_LOCK_DURATION), "Failed test if Ingest Lock exceeds max duration " + pathToFileThatGoesThroughIngest);
         
         Response origXml = UtilIT.getFileMetadata(origFileId, null, apiToken);
         assertEquals(200, origXml.getStatusCode());
@@ -2947,7 +2942,7 @@ public void testRestrictFileTermsOfUseAndAccess() throws IOException {
 
         String fileId = JsonPath.from(uploadFile.body().asString()).getString("data.files[0].dataFile.id");
 
-        assertTrue("Failed test if Ingest Lock exceeds max duration " + pathToFile, UtilIT.sleepForLock(datasetId.longValue(), "Ingest", authorApiToken, UtilIT.MAXIMUM_INGEST_LOCK_DURATION));
+        assertTrue(UtilIT.sleepForLock(datasetId.longValue(), "Ingest", authorApiToken, UtilIT.MAXIMUM_INGEST_LOCK_DURATION), "Failed test if Ingest Lock exceeds max duration " + pathToFile);
 
         Response restrictFile = UtilIT.restrictFile(fileId, true, authorApiToken);
         restrictFile.prettyPrint();
@@ -3017,7 +3012,7 @@ public void testRestrictFilesWORequestAccess() throws IOException {
 
         String fileId = JsonPath.from(uploadFile.body().asString()).getString("data.files[0].dataFile.id");
 
-        assertTrue("Failed test if Ingest Lock exceeds max duration " + pathToFile, UtilIT.sleepForLock(datasetId.longValue(), "Ingest", authorApiToken, UtilIT.MAXIMUM_INGEST_LOCK_DURATION));
+        assertTrue(UtilIT.sleepForLock(datasetId.longValue(), "Ingest", authorApiToken, UtilIT.MAXIMUM_INGEST_LOCK_DURATION), "Failed test if Ingest Lock exceeds max duration " + pathToFile);
 
         Response restrictFile = UtilIT.restrictFile(fileId, true, authorApiToken);
         restrictFile.prettyPrint();
diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsTest.java b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsTest.java
index fded590d9db..58aa366c9e5 100644
--- a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsTest.java
@@ -1,13 +1,13 @@
 package edu.harvard.iq.dataverse.api;
 
-import org.junit.Test;
+import org.junit.jupiter.api.Test;
 import java.util.HashSet;
 import java.util.Set;
 import java.util.function.Predicate;
 import java.util.stream.Collectors;
 
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertTrue;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertTrue;
 
 public class DatasetsTest {
 
diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java b/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java
index 4a4095e6c91..09052f9e4ea 100644
--- a/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java
+++ b/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java
@@ -1,9 +1,9 @@
 package edu.harvard.iq.dataverse.api;
 
-import com.jayway.restassured.RestAssured;
-import static com.jayway.restassured.RestAssured.given;
-import static com.jayway.restassured.path.json.JsonPath.with;
-import com.jayway.restassured.response.Response;
+import io.restassured.RestAssured;
+import static io.restassured.RestAssured.given;
+import static io.restassured.path.json.JsonPath.with;
+import io.restassured.response.Response;
 import edu.harvard.iq.dataverse.Dataverse;
 import edu.harvard.iq.dataverse.settings.SettingsServiceBean;
 import edu.harvard.iq.dataverse.util.BundleUtil;
@@ -13,27 +13,26 @@
 import java.nio.file.Paths;
 import java.util.Arrays;
 import java.util.List;
-import java.util.logging.Level;
 import java.util.logging.Logger;
-import javax.json.Json;
-import javax.json.JsonObject;
-import javax.json.JsonObjectBuilder;
-import static javax.ws.rs.core.Response.Status.CREATED;
-import static javax.ws.rs.core.Response.Status.INTERNAL_SERVER_ERROR;
-import javax.ws.rs.core.Response.Status;
-import static javax.ws.rs.core.Response.Status.BAD_REQUEST;
-import static javax.ws.rs.core.Response.Status.OK;
-import static junit.framework.Assert.assertEquals;
-import org.junit.BeforeClass;
-import org.junit.Test;
+import jakarta.json.Json;
+import jakarta.json.JsonObject;
+import jakarta.json.JsonObjectBuilder;
+import static jakarta.ws.rs.core.Response.Status.CREATED;
+import static jakarta.ws.rs.core.Response.Status.INTERNAL_SERVER_ERROR;
+import jakarta.ws.rs.core.Response.Status;
+import static jakarta.ws.rs.core.Response.Status.BAD_REQUEST;
+import static jakarta.ws.rs.core.Response.Status.OK;
+
+import org.junit.jupiter.api.AfterAll;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.Test;
 import static org.hamcrest.CoreMatchers.equalTo;
-import org.junit.AfterClass;
-import static org.junit.Assert.assertNotNull;
-import static org.junit.Assert.assertNull;
-import static org.junit.Assert.assertTrue;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertNotNull;
+import static org.junit.jupiter.api.Assertions.assertNull;
+import static org.junit.jupiter.api.Assertions.assertTrue;
 import java.nio.file.Files;
-import com.jayway.restassured.path.json.JsonPath;
-import static javax.ws.rs.core.Response.Status.OK;
+import io.restassured.path.json.JsonPath;
 import org.hamcrest.CoreMatchers;
 import org.hamcrest.Matchers;
 
@@ -41,12 +40,12 @@ public class DataversesIT {
 
     private static final Logger logger = Logger.getLogger(DataversesIT.class.getCanonicalName());
 
-    @BeforeClass
+    @BeforeAll
     public static void setUpClass() {
         RestAssured.baseURI = UtilIT.getRestAssuredBaseUri();
     }
     
-    @AfterClass
+    @AfterAll
     public static void afterClass() {
         Response removeExcludeEmail = UtilIT.deleteSetting(SettingsServiceBean.Key.ExcludeEmailFromExport);
     }
@@ -373,7 +372,7 @@ public void testMoveDataverse() {
         while (checkIndex) {
             try {   
                     try {
-                        Thread.sleep(2000);
+                        Thread.sleep(4000);
                     } catch (InterruptedException ex) {
                     }                
                 Response search = UtilIT.search("id:dataverse_" + dataverseId + "&subtree=" + dataverseAlias2, apiToken);
@@ -510,6 +509,13 @@ public void testImportDDI() throws IOException, InterruptedException {
         logger.info(importDDI.prettyPrint());
         assertEquals(201, importDDI.getStatusCode());
 
+        // Under normal conditions, you shouldn't need to destroy these datasets.
+        // Uncomment if they're still around from a previous failed run.
+//        Response destroy1 = UtilIT.destroyDataset("doi:10.5072/FK2/ABCD11", apiToken);
+//        destroy1.prettyPrint();
+//        Response destroy2 = UtilIT.destroyDataset("doi:10.5072/FK2/ABCD22", apiToken);
+//        destroy2.prettyPrint();
+
         Response importDDIPid = UtilIT.importDatasetDDIViaNativeApi(apiToken, dataverseAlias, xml,  "doi:10.5072/FK2/ABCD11", "no");
         logger.info(importDDIPid.prettyPrint());
         assertEquals(201, importDDIPid.getStatusCode());
@@ -561,12 +567,8 @@ public void testImportDDI() throws IOException, InterruptedException {
         Integer datasetIdIntPidRel = JsonPath.from(importDDIPidRel.body().asString()).getInt("data.id");
         Response destroyDatasetResponsePidRel = UtilIT.destroyDataset(datasetIdIntPidRel, apiToken);
         assertEquals(200, destroyDatasetResponsePidRel.getStatusCode());
-
-        // This last dataset we have just imported, let's give it a sec. to finish indexing (?)
-        // or whatever it is that may still be happening. (Have been seeing intermittent 500 from the next
-        // destroyDataset() line lately)
         
-        Thread.sleep(1000L); 
+        UtilIT.sleepForDeadlock(UtilIT.MAXIMUM_IMPORT_DURATION);
 
         Integer datasetIdIntRelease = JsonPath.from(importDDIRelease.body().asString()).getInt("data.id");
         Response destroyDatasetResponseRelease = UtilIT.destroyDataset(datasetIdIntRelease, apiToken);
diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DataversesTest.java b/src/test/java/edu/harvard/iq/dataverse/api/DataversesTest.java
index 1ebba65c9c4..512b07912dd 100644
--- a/src/test/java/edu/harvard/iq/dataverse/api/DataversesTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/api/DataversesTest.java
@@ -22,18 +22,19 @@
 import edu.harvard.iq.dataverse.settings.SettingsServiceBean;
 import org.hamcrest.MatcherAssert;
 import org.hamcrest.Matchers;
-import org.junit.Before;
-import org.junit.Test;
-import org.junit.runner.RunWith;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.extension.ExtendWith;
 import org.mockito.ArgumentCaptor;
 import org.mockito.InjectMocks;
 import org.mockito.Mock;
 import org.mockito.Mockito;
-import org.mockito.junit.MockitoJUnitRunner;
 
-import javax.servlet.http.HttpServletRequest;
-import javax.ws.rs.container.ContainerRequestContext;
-import javax.ws.rs.core.Response;
+import jakarta.servlet.http.HttpServletRequest;
+import jakarta.ws.rs.container.ContainerRequestContext;
+import jakarta.ws.rs.core.Response;
+import org.mockito.junit.jupiter.MockitoExtension;
+
 import java.util.Arrays;
 import java.util.Collections;
 import java.util.List;
@@ -43,7 +44,7 @@
  *
  * @author adaybujeda
  */
-@RunWith(MockitoJUnitRunner.class)
+@ExtendWith(MockitoExtension.class)
 public class DataversesTest {
     // From AbstractApiBean class
     @Mock
@@ -78,7 +79,7 @@ public class DataversesTest {
 
     private Dataverse VALID_DATAVERSE;
 
-    @Before
+    @BeforeEach
     public void beforeEachTest() {
         VALID_DATAVERSE = new Dataverse();
         VALID_DATAVERSE.setId(MocksFactory.nextId());
@@ -225,7 +226,7 @@ public void updateMetadataBlockFacetsRoot_should_return_200_and_make_no_update_w
         Response result = target.updateMetadataBlockFacetsRoot(containerRequestContext, VALID_DATAVERSE.getAlias(), "true");
 
         MatcherAssert.assertThat(result.getStatus(), Matchers.is(200));
-        Mockito.verifyZeroInteractions(engineSvc);
+        Mockito.verifyNoInteractions(engineSvc);
     }
 
     @Test
diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DeactivateUsersIT.java b/src/test/java/edu/harvard/iq/dataverse/api/DeactivateUsersIT.java
index 801c7cbf7e7..2b29de3e447 100644
--- a/src/test/java/edu/harvard/iq/dataverse/api/DeactivateUsersIT.java
+++ b/src/test/java/edu/harvard/iq/dataverse/api/DeactivateUsersIT.java
@@ -1,25 +1,25 @@
 package edu.harvard.iq.dataverse.api;
 
-import com.jayway.restassured.RestAssured;
-import com.jayway.restassured.path.json.JsonPath;
-import com.jayway.restassured.response.Response;
+import io.restassured.RestAssured;
+import io.restassured.path.json.JsonPath;
+import io.restassured.response.Response;
 import edu.harvard.iq.dataverse.authorization.DataverseRole;
 import java.util.ArrayList;
 import java.util.Collections;
 import java.util.List;
-import static javax.ws.rs.core.Response.Status.BAD_REQUEST;
-import static javax.ws.rs.core.Response.Status.CREATED;
-import static javax.ws.rs.core.Response.Status.FORBIDDEN;
-import static javax.ws.rs.core.Response.Status.OK;
-import static javax.ws.rs.core.Response.Status.UNAUTHORIZED;
+import static jakarta.ws.rs.core.Response.Status.BAD_REQUEST;
+import static jakarta.ws.rs.core.Response.Status.CREATED;
+import static jakarta.ws.rs.core.Response.Status.FORBIDDEN;
+import static jakarta.ws.rs.core.Response.Status.OK;
+import static jakarta.ws.rs.core.Response.Status.UNAUTHORIZED;
 import static org.hamcrest.CoreMatchers.equalTo;
 import static org.hamcrest.CoreMatchers.startsWith;
-import org.junit.BeforeClass;
-import org.junit.Test;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.Test;
 
 public class DeactivateUsersIT {
 
-    @BeforeClass
+    @BeforeAll
     public static void setUp() {
         RestAssured.baseURI = UtilIT.getRestAssuredBaseUri();
     }
diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DeleteUsersIT.java b/src/test/java/edu/harvard/iq/dataverse/api/DeleteUsersIT.java
index cae1d0e210a..13f48f9b854 100644
--- a/src/test/java/edu/harvard/iq/dataverse/api/DeleteUsersIT.java
+++ b/src/test/java/edu/harvard/iq/dataverse/api/DeleteUsersIT.java
@@ -1,8 +1,8 @@
 package edu.harvard.iq.dataverse.api;
 
-import com.jayway.restassured.RestAssured;
-import com.jayway.restassured.path.json.JsonPath;
-import com.jayway.restassured.response.Response;
+import io.restassured.RestAssured;
+import io.restassured.path.json.JsonPath;
+import io.restassured.response.Response;
 import edu.harvard.iq.dataverse.authorization.DataverseRole;
 import java.io.File;
 import java.io.IOException;
@@ -11,16 +11,16 @@
 import java.util.ArrayList;
 import java.util.Collections;
 import java.util.List;
-import javax.json.Json;
-import javax.json.JsonObjectBuilder;
-import static javax.ws.rs.core.Response.Status.BAD_REQUEST;
-import static javax.ws.rs.core.Response.Status.CREATED;
-import static javax.ws.rs.core.Response.Status.OK;
-import static javax.ws.rs.core.Response.Status.UNAUTHORIZED;
-import static junit.framework.Assert.assertEquals;
+import jakarta.json.Json;
+import jakarta.json.JsonObjectBuilder;
+import static jakarta.ws.rs.core.Response.Status.BAD_REQUEST;
+import static jakarta.ws.rs.core.Response.Status.CREATED;
+import static jakarta.ws.rs.core.Response.Status.OK;
+import static jakarta.ws.rs.core.Response.Status.UNAUTHORIZED;
+import static org.junit.jupiter.api.Assertions.assertEquals;
 import static org.hamcrest.CoreMatchers.equalTo;
-import org.junit.BeforeClass;
-import org.junit.Test;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.Test;
 
 /**
  * The following query has been helpful in discovering places where user ids
@@ -130,7 +130,7 @@
  */
 public class DeleteUsersIT {
 
-    @BeforeClass
+    @BeforeAll
     public static void setUp() {
         RestAssured.baseURI = UtilIT.getRestAssuredBaseUri();
     }
diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DownloadFilesIT.java b/src/test/java/edu/harvard/iq/dataverse/api/DownloadFilesIT.java
index 7d5adf95507..598ba36c1e1 100644
--- a/src/test/java/edu/harvard/iq/dataverse/api/DownloadFilesIT.java
+++ b/src/test/java/edu/harvard/iq/dataverse/api/DownloadFilesIT.java
@@ -1,9 +1,9 @@
 package edu.harvard.iq.dataverse.api;
 
-import com.jayway.restassured.RestAssured;
-import com.jayway.restassured.path.json.JsonPath;
-import com.jayway.restassured.response.Headers;
-import com.jayway.restassured.response.Response;
+import io.restassured.RestAssured;
+import io.restassured.http.Headers;
+import io.restassured.path.json.JsonPath;
+import io.restassured.response.Response;
 import java.io.File;
 import java.io.FileOutputStream;
 import java.io.IOException;
@@ -16,20 +16,20 @@
 import java.util.zip.ZipEntry;
 import java.util.zip.ZipInputStream;
 import java.util.zip.ZipOutputStream;
-import static javax.ws.rs.core.Response.Status.CREATED;
-import static javax.ws.rs.core.Response.Status.FORBIDDEN;
-import static javax.ws.rs.core.Response.Status.OK;
-import static javax.ws.rs.core.Response.Status.UNAUTHORIZED;
-import static javax.ws.rs.core.Response.Status.BAD_REQUEST;
+import static jakarta.ws.rs.core.Response.Status.CREATED;
+import static jakarta.ws.rs.core.Response.Status.FORBIDDEN;
+import static jakarta.ws.rs.core.Response.Status.OK;
+import static jakarta.ws.rs.core.Response.Status.UNAUTHORIZED;
+import static jakarta.ws.rs.core.Response.Status.BAD_REQUEST;
 import static org.hamcrest.CoreMatchers.equalTo;
-import org.junit.Assert;
-import static org.junit.Assert.assertTrue;
-import org.junit.BeforeClass;
-import org.junit.Test;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertTrue;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.Test;
 
 public class DownloadFilesIT {
 
-    @BeforeClass
+    @BeforeAll
     public static void setUpClass() {
         RestAssured.baseURI = UtilIT.getRestAssuredBaseUri();
     }
@@ -90,7 +90,7 @@ public void downloadAllFilesByVersion() throws IOException {
 
         // Note that a MANIFEST.TXT file is added.
         HashSet<String> expectedFiles1 = new HashSet<>(Arrays.asList("MANIFEST.TXT", "README.md", "CONTRIBUTING.md"));
-        Assert.assertEquals(expectedFiles1, filenamesFound1);
+        assertEquals(expectedFiles1, filenamesFound1);
 
         // A guest user can't download unpublished files.
         // (a guest user cannot even see that the draft version actually exists;
@@ -130,7 +130,7 @@ public void downloadAllFilesByVersion() throws IOException {
 
         // The creator gets the draft version with an extra file.
         HashSet<String> expectedFiles2 = new HashSet<>(Arrays.asList("LICENSE.md", "MANIFEST.TXT", "README.md", "CONTRIBUTING.md"));
-        Assert.assertEquals(expectedFiles2, filenamesFound2);
+        assertEquals(expectedFiles2, filenamesFound2);
 
         Response downloadFiles5 = UtilIT.downloadFiles(datasetPid, null);
         downloadFiles5.then().assertThat()
@@ -140,7 +140,7 @@ public void downloadAllFilesByVersion() throws IOException {
 
         // A guest user gets the 1.0 version with only 3 files.
         HashSet<String> expectedFiles3 = new HashSet<>(Arrays.asList("MANIFEST.TXT", "README.md", "CONTRIBUTING.md"));
-        Assert.assertEquals(expectedFiles3, filenamesFound3);
+        assertEquals(expectedFiles3, filenamesFound3);
 
         // Publishing version 2.0
         UtilIT.publishDatasetViaNativeApi(datasetPid, "major", apiToken)
@@ -154,7 +154,7 @@ public void downloadAllFilesByVersion() throws IOException {
 
         // By not specifying a version, the creator gets the latest version. In this case, 2.0 (published) with 4 files.
         HashSet<String> expectedFiles4 = new HashSet<>(Arrays.asList("LICENSE.md", "MANIFEST.TXT", "README.md", "CONTRIBUTING.md"));
-        Assert.assertEquals(expectedFiles4, filenamesFound4);
+        assertEquals(expectedFiles4, filenamesFound4);
 
         String datasetVersion = "1.0";
         Response downloadFiles7 = UtilIT.downloadFiles(datasetPid, datasetVersion, apiToken);
@@ -165,7 +165,7 @@ public void downloadAllFilesByVersion() throws IOException {
 
         // Creator specifies the 1.0 version and gets the expected 3 files.
         HashSet<String> expectedFiles5 = new HashSet<>(Arrays.asList("MANIFEST.TXT", "README.md", "CONTRIBUTING.md"));
-        Assert.assertEquals(expectedFiles5, filenamesFound5);
+        assertEquals(expectedFiles5, filenamesFound5);
 
         // Add Code of Conduct file
         Path pathtoCocFile = Paths.get(Files.createTempDirectory(null) + File.separator + "CODE_OF_CONDUCT.md");
@@ -186,7 +186,7 @@ public void downloadAllFilesByVersion() throws IOException {
 
         // If the creator doesn't specify a version, they get the latest draft with 5 files.
         HashSet<String> expectedFiles6 = new HashSet<>(Arrays.asList("CODE_OF_CONDUCT.md", "LICENSE.md", "MANIFEST.TXT", "README.md", "CONTRIBUTING.md"));
-        Assert.assertEquals(expectedFiles6, filenamesFound6);
+        assertEquals(expectedFiles6, filenamesFound6);
 
         String datasetVersionLatestPublished = ":latest-published";
         Response downloadFiles9 = UtilIT.downloadFiles(datasetPid, datasetVersionLatestPublished, apiToken);
@@ -197,7 +197,7 @@ public void downloadAllFilesByVersion() throws IOException {
 
         // The contributor requested "latest published" and got version 3 with 4 files.
         HashSet<String> expectedFiles7 = new HashSet<>(Arrays.asList("LICENSE.md", "MANIFEST.TXT", "README.md", "CONTRIBUTING.md"));
-        Assert.assertEquals(expectedFiles7, filenamesFound7);
+        assertEquals(expectedFiles7, filenamesFound7);
 
         // Guests cannot download draft versions.
         String datasetVersionDraft = ":draft";
@@ -266,14 +266,14 @@ public void downloadAllFilesRestricted() throws IOException {
                 .statusCode(OK.getStatusCode());
 
         // The creator can download a restricted file from a draft.
-        Assert.assertEquals(new HashSet<>(Arrays.asList("secrets.md", "MANIFEST.TXT")), gatherFilenames(downloadFiles1.getBody().asInputStream()));
+        assertEquals(new HashSet<>(Arrays.asList("secrets.md", "MANIFEST.TXT")), gatherFilenames(downloadFiles1.getBody().asInputStream()));
 
         Response downloadFiles2 = UtilIT.downloadFiles(datasetPid, apiToken);
         downloadFiles2.then().assertThat()
                 .statusCode(OK.getStatusCode());
 
         // The creator can download a restricted file and an unrestricted file from a draft.
-        Assert.assertEquals(new HashSet<>(Arrays.asList("secrets.md", "MANIFEST.TXT")), gatherFilenames(downloadFiles2.getBody().asInputStream()));
+        assertEquals(new HashSet<>(Arrays.asList("secrets.md", "MANIFEST.TXT")), gatherFilenames(downloadFiles2.getBody().asInputStream()));
 
         UtilIT.publishDataverseViaNativeApi(dataverseAlias, apiToken)
                 .then().assertThat().statusCode(OK.getStatusCode());
@@ -307,14 +307,14 @@ public void downloadAllFilesRestricted() throws IOException {
                 .statusCode(OK.getStatusCode());
 
         // The guest can only get the unrestricted file (and the manifest).
-        Assert.assertEquals(new HashSet<>(Arrays.asList("README.md", "MANIFEST.TXT")), gatherFilenames(downloadFiles4.getBody().asInputStream()));
+        assertEquals(new HashSet<>(Arrays.asList("README.md", "MANIFEST.TXT")), gatherFilenames(downloadFiles4.getBody().asInputStream()));
 
         Response downloadFiles5 = UtilIT.downloadFiles(datasetPid, apiToken);
         downloadFiles5.then().assertThat()
                 .statusCode(OK.getStatusCode());
 
         // The creator can download both files (and the manifest).
-        Assert.assertEquals(new HashSet<>(Arrays.asList("secrets.md", "README.md", "MANIFEST.TXT")), gatherFilenames(downloadFiles5.getBody().asInputStream()));
+        assertEquals(new HashSet<>(Arrays.asList("secrets.md", "README.md", "MANIFEST.TXT")), gatherFilenames(downloadFiles5.getBody().asInputStream()));
 
     }
 
@@ -356,21 +356,21 @@ public void downloadAllFilesTabular() throws IOException {
                 .body("data.files[0].label", equalTo("50by1000.dta"));
 
         // UtilIT.MAXIMUM_INGEST_LOCK_DURATION is 3 but not long enough.
-        assertTrue("Failed test if Ingest Lock exceeds max duration " + pathToFile, UtilIT.sleepForLock(datasetId.longValue(), "Ingest", apiToken, UtilIT.MAXIMUM_INGEST_LOCK_DURATION + 3));
+        assertTrue(UtilIT.sleepForLock(datasetId.longValue(), "Ingest", apiToken, UtilIT.MAXIMUM_INGEST_LOCK_DURATION + 3), "Failed test if Ingest Lock exceeds max duration " + pathToFile);
 
         Response downloadFiles1 = UtilIT.downloadFiles(datasetPid, apiToken);
         downloadFiles1.then().assertThat()
                 .statusCode(OK.getStatusCode());
 
         // By default we get the archival version (.tab).
-        Assert.assertEquals(new HashSet<>(Arrays.asList("50by1000.tab", "MANIFEST.TXT")), gatherFilenames(downloadFiles1.getBody().asInputStream()));
+        assertEquals(new HashSet<>(Arrays.asList("50by1000.tab", "MANIFEST.TXT")), gatherFilenames(downloadFiles1.getBody().asInputStream()));
 
         Response downloadFiles2 = UtilIT.downloadFiles(datasetPid, UtilIT.DownloadFormat.original, apiToken);
         downloadFiles2.then().assertThat()
                 .statusCode(OK.getStatusCode());
 
         // By passing format=original we get the original version, Stata (.dta) in this case.
-        Assert.assertEquals(new HashSet<>(Arrays.asList("50by1000.dta", "MANIFEST.TXT")), gatherFilenames(downloadFiles2.getBody().asInputStream()));
+        assertEquals(new HashSet<>(Arrays.asList("50by1000.dta", "MANIFEST.TXT")), gatherFilenames(downloadFiles2.getBody().asInputStream()));
     }
 
     /**
@@ -437,8 +437,8 @@ public void downloadFilenameUtf8() throws IOException {
                 .statusCode(OK.getStatusCode());
         Headers headers = downloadFile.getHeaders();
         // In "MY READ–ME.md" below the space is %20 and the en-dash ("–") is "%E2%80%93" (e2 80 93 in hex).
-        Assert.assertEquals("attachment; filename=\"MY%20READ%E2%80%93ME.md\"", headers.getValue("Content-disposition"));
-        Assert.assertEquals("text/markdown; name=\"MY%20READ%E2%80%93ME.md\";charset=UTF-8", headers.getValue("Content-Type"));
+        assertEquals("attachment; filename=\"MY%20READ%E2%80%93ME.md\"", headers.getValue("Content-disposition"));
+        assertEquals("text/markdown; name=\"MY%20READ%E2%80%93ME.md\";charset=UTF-8", headers.getValue("Content-Type"));
 
         // Download all files as a zip and assert "MY READ–ME.md" has an en-dash.
         Response downloadFiles = UtilIT.downloadFiles(datasetPid, apiToken);
@@ -450,7 +450,7 @@ public void downloadFilenameUtf8() throws IOException {
         // Note that a MANIFEST.TXT file is added.
         // "MY READ–ME.md" (with an en-dash) is correctly extracted from the downloaded zip
         HashSet<String> expectedFiles = new HashSet<>(Arrays.asList("MANIFEST.TXT", "MY READ–ME.md"));
-        Assert.assertEquals(expectedFiles, filenamesFound);
+        assertEquals(expectedFiles, filenamesFound);
     }
 
     private HashSet<String> gatherFilenames(InputStream inputStream) throws IOException {
diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DownloadInstanceWriterTest.java b/src/test/java/edu/harvard/iq/dataverse/api/DownloadInstanceWriterTest.java
index 6de52951077..f7f61d18a51 100644
--- a/src/test/java/edu/harvard/iq/dataverse/api/DownloadInstanceWriterTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/api/DownloadInstanceWriterTest.java
@@ -2,16 +2,16 @@
 
 import edu.harvard.iq.dataverse.dataaccess.Range;
 import java.util.List;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertNotNull;
-import org.junit.Before;
-import org.junit.Test;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.*;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
 
 public class DownloadInstanceWriterTest {
 
     DownloadInstanceWriter diw;
 
-    @Before
+    @BeforeEach
     public void setUpClass() {
         diw = new DownloadInstanceWriter();
     }
diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DuplicateFilesIT.java b/src/test/java/edu/harvard/iq/dataverse/api/DuplicateFilesIT.java
index 6227e96fdfa..32b949fad25 100644
--- a/src/test/java/edu/harvard/iq/dataverse/api/DuplicateFilesIT.java
+++ b/src/test/java/edu/harvard/iq/dataverse/api/DuplicateFilesIT.java
@@ -1,23 +1,23 @@
 package edu.harvard.iq.dataverse.api;
 
-import com.jayway.restassured.RestAssured;
-import com.jayway.restassured.parsing.Parser;
-import com.jayway.restassured.path.json.JsonPath;
-import com.jayway.restassured.response.Response;
+import io.restassured.RestAssured;
+import io.restassured.parsing.Parser;
+import io.restassured.path.json.JsonPath;
+import io.restassured.response.Response;
 import java.io.File;
 import java.io.IOException;
 import java.nio.file.Files;
 import java.nio.file.Path;
 import java.nio.file.Paths;
-import javax.json.Json;
-import javax.json.JsonObjectBuilder;
-import static javax.ws.rs.core.Response.Status.BAD_REQUEST;
-import static javax.ws.rs.core.Response.Status.CREATED;
-import static javax.ws.rs.core.Response.Status.OK;
+import jakarta.json.Json;
+import jakarta.json.JsonObjectBuilder;
+import static jakarta.ws.rs.core.Response.Status.BAD_REQUEST;
+import static jakarta.ws.rs.core.Response.Status.CREATED;
+import static jakarta.ws.rs.core.Response.Status.OK;
 import static org.hamcrest.CoreMatchers.equalTo;
 import static org.hamcrest.CoreMatchers.nullValue;
-import org.junit.BeforeClass;
-import org.junit.Test;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.Test;
 
 /**
  * Make assertions about duplicate file names (and maybe in the future,
@@ -25,7 +25,7 @@
  */
 public class DuplicateFilesIT {
 
-    @BeforeClass
+    @BeforeAll
     public static void setUpClass() {
         RestAssured.baseURI = UtilIT.getRestAssuredBaseUri();
     }
diff --git a/src/test/java/edu/harvard/iq/dataverse/api/EditDDIIT.java b/src/test/java/edu/harvard/iq/dataverse/api/EditDDIIT.java
index 1775649c3dd..3b690163a39 100644
--- a/src/test/java/edu/harvard/iq/dataverse/api/EditDDIIT.java
+++ b/src/test/java/edu/harvard/iq/dataverse/api/EditDDIIT.java
@@ -1,18 +1,16 @@
 package edu.harvard.iq.dataverse.api;
 
-import com.jayway.restassured.RestAssured;
-import com.jayway.restassured.path.json.JsonPath;
-import com.jayway.restassured.response.Response;
-import static edu.harvard.iq.dataverse.api.AccessIT.apiToken;
-import static edu.harvard.iq.dataverse.api.AccessIT.datasetId;
+import io.restassured.RestAssured;
+import io.restassured.path.json.JsonPath;
+import io.restassured.response.Response;
 
 
 import edu.harvard.iq.dataverse.datavariable.VarGroup;
 import edu.harvard.iq.dataverse.datavariable.VariableMetadata;
 import edu.harvard.iq.dataverse.datavariable.VariableMetadataDDIParser;
 
-import org.junit.BeforeClass;
-import org.junit.Test;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.Test;
 
 
 import java.nio.file.Paths;
@@ -28,20 +26,21 @@
 import javax.xml.stream.XMLStreamException;
 
 
-import static javax.ws.rs.core.Response.Status.OK;
-import static junit.framework.Assert.assertEquals;
+import static jakarta.ws.rs.core.Response.Status.OK;
 
-import static org.junit.Assert.assertNotEquals;
-import static org.junit.Assert.assertNotNull;
 import java.nio.file.Files;
-import static org.junit.Assert.assertTrue;
+
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertNotEquals;
+import static org.junit.jupiter.api.Assertions.assertNotNull;
+import static org.junit.jupiter.api.Assertions.assertTrue;
 
 
 public class EditDDIIT {
 
     private static final Logger logger = Logger.getLogger(EditDDIIT.class.getCanonicalName());
 
-    @BeforeClass
+    @BeforeAll
     public static void setUpClass() {
         RestAssured.baseURI = UtilIT.getRestAssuredBaseUri();
     }
@@ -81,7 +80,7 @@ public void testUpdateVariableMetadata() throws InterruptedException {
 
         // Give file time to ingest
         
-        assertTrue("Failed test if Ingest Lock exceeds max duration " + pathToFileThatGoesThroughIngest , UtilIT.sleepForLock(datasetId.longValue(), "Ingest", apiToken, UtilIT.MAXIMUM_INGEST_LOCK_DURATION));
+        assertTrue(UtilIT.sleepForLock(datasetId.longValue(), "Ingest", apiToken, UtilIT.MAXIMUM_INGEST_LOCK_DURATION), "Failed test if Ingest Lock exceeds max duration " + pathToFileThatGoesThroughIngest);
         
         Response origXml = UtilIT.getFileMetadata(origFileId, null, apiToken);
         assertEquals(200, origXml.getStatusCode());
@@ -150,6 +149,7 @@ public void testUpdateVariableMetadata() throws InterruptedException {
         assertEquals(200, publishDataset.getStatusCode());
 
         Response editDDIResponseNewDraft = UtilIT.editDDI(stringOrigXml, origFileId, apiToken);
+        editDDIResponseNewDraft.prettyPrint();
         assertEquals(200, editDDIResponseNewDraft.getStatusCode());
 
         //not authorized
diff --git a/src/test/java/edu/harvard/iq/dataverse/api/ExternalToolsIT.java b/src/test/java/edu/harvard/iq/dataverse/api/ExternalToolsIT.java
index 94d47c51b44..6f414fb3e24 100644
--- a/src/test/java/edu/harvard/iq/dataverse/api/ExternalToolsIT.java
+++ b/src/test/java/edu/harvard/iq/dataverse/api/ExternalToolsIT.java
@@ -1,31 +1,31 @@
 package edu.harvard.iq.dataverse.api;
 
-import com.jayway.restassured.RestAssured;
-import com.jayway.restassured.path.json.JsonPath;
-import com.jayway.restassured.response.Response;
+import io.restassured.RestAssured;
+import io.restassured.path.json.JsonPath;
+import io.restassured.response.Response;
 import java.io.File;
 import java.io.IOException;
 import java.io.StringReader;
 import java.nio.file.Path;
 import java.nio.file.Paths;
-import javax.json.Json;
-import javax.json.JsonArray;
-import javax.json.JsonObject;
-import javax.json.JsonObjectBuilder;
-import javax.json.JsonReader;
-import static javax.ws.rs.core.Response.Status.BAD_REQUEST;
-import static javax.ws.rs.core.Response.Status.CREATED;
-import static javax.ws.rs.core.Response.Status.OK;
+import jakarta.json.Json;
+import jakarta.json.JsonArray;
+import jakarta.json.JsonObject;
+import jakarta.json.JsonObjectBuilder;
+import jakarta.json.JsonReader;
+import static jakarta.ws.rs.core.Response.Status.BAD_REQUEST;
+import static jakarta.ws.rs.core.Response.Status.CREATED;
+import static jakarta.ws.rs.core.Response.Status.OK;
 import org.hamcrest.CoreMatchers;
 import org.hamcrest.Matchers;
-import org.junit.BeforeClass;
-import org.junit.Test;
-import static org.junit.Assert.assertTrue;
-import org.junit.Ignore;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.Test;
+import static org.junit.jupiter.api.Assertions.assertTrue;
+import org.junit.jupiter.api.Disabled;
 
 public class ExternalToolsIT {
 
-    @BeforeClass
+    @BeforeAll
     public static void setUp() {
         RestAssured.baseURI = UtilIT.getRestAssuredBaseUri();
     }
@@ -89,7 +89,7 @@ public void testFileLevelTool1() {
         uploadTabularFile.then().assertThat()
                 .statusCode(OK.getStatusCode());
 
-        assertTrue("Failed test if Ingest Lock exceeds max duration " + pathToTabularFile, UtilIT.sleepForLock(datasetId.longValue(), "Ingest", apiToken, UtilIT.MAXIMUM_INGEST_LOCK_DURATION));
+        assertTrue(UtilIT.sleepForLock(datasetId.longValue(), "Ingest", apiToken, UtilIT.MAXIMUM_INGEST_LOCK_DURATION), "Failed test if Ingest Lock exceeds max duration " + pathToTabularFile);
         Integer tabularFileId = JsonPath.from(uploadTabularFile.getBody().asString()).getInt("data.files[0].dataFile.id");
 
         JsonObjectBuilder job = Json.createObjectBuilder();
@@ -309,7 +309,7 @@ public void testAddExternalToolNonReservedWord() throws IOException {
                 .statusCode(BAD_REQUEST.getStatusCode());
     }
 
-    @Ignore
+    @Disabled
     @Test
     public void deleteTools() {
 
@@ -330,7 +330,7 @@ public void deleteTools() {
     }
 
     // preview only
-    @Ignore
+    @Disabled
     @Test
     public void createToolShellScript() {
         JsonObjectBuilder job = Json.createObjectBuilder();
@@ -370,7 +370,7 @@ public void createToolShellScript() {
     }
 
     // explore only
-    @Ignore
+    @Disabled
     @Test
     public void createToolDataExplorer() {
         JsonObjectBuilder job = Json.createObjectBuilder();
@@ -403,7 +403,7 @@ public void createToolDataExplorer() {
     }
 
     // both preview and explore
-    @Ignore
+    @Disabled
     @Test
     public void createToolSpreadsheetViewer() {
         JsonObjectBuilder job = Json.createObjectBuilder();
diff --git a/src/test/java/edu/harvard/iq/dataverse/api/FeedbackApiIT.java b/src/test/java/edu/harvard/iq/dataverse/api/FeedbackApiIT.java
index bfe4a58abbf..220d386e28e 100644
--- a/src/test/java/edu/harvard/iq/dataverse/api/FeedbackApiIT.java
+++ b/src/test/java/edu/harvard/iq/dataverse/api/FeedbackApiIT.java
@@ -1,19 +1,19 @@
 package edu.harvard.iq.dataverse.api;
 
-import com.jayway.restassured.RestAssured;
-import com.jayway.restassured.path.json.JsonPath;
-import com.jayway.restassured.response.Response;
-import javax.json.Json;
-import javax.json.JsonObjectBuilder;
-import static javax.ws.rs.core.Response.Status.CREATED;
-import static javax.ws.rs.core.Response.Status.OK;
+import io.restassured.RestAssured;
+import io.restassured.path.json.JsonPath;
+import io.restassured.response.Response;
+import jakarta.json.Json;
+import jakarta.json.JsonObjectBuilder;
+import static jakarta.ws.rs.core.Response.Status.CREATED;
+import static jakarta.ws.rs.core.Response.Status.OK;
 import org.hamcrest.CoreMatchers;
-import org.junit.BeforeClass;
-import org.junit.Test;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.Test;
 
 public class FeedbackApiIT {
 
-    @BeforeClass
+    @BeforeAll
     public static void setUpClass() {
         RestAssured.baseURI = UtilIT.getRestAssuredBaseUri();
     }
diff --git a/src/test/java/edu/harvard/iq/dataverse/api/FileMetadataIT.java b/src/test/java/edu/harvard/iq/dataverse/api/FileMetadataIT.java
index 1ca7c99be8e..619a7594244 100644
--- a/src/test/java/edu/harvard/iq/dataverse/api/FileMetadataIT.java
+++ b/src/test/java/edu/harvard/iq/dataverse/api/FileMetadataIT.java
@@ -1,22 +1,23 @@
 package edu.harvard.iq.dataverse.api;
 
-import com.jayway.restassured.RestAssured;
-import com.jayway.restassured.http.ContentType;
-import com.jayway.restassured.response.Response;
+import io.restassured.RestAssured;
+import io.restassured.http.ContentType;
+import io.restassured.response.Response;
 import org.apache.commons.io.IOUtils;
-import org.junit.After;
-import org.junit.AfterClass;
-import org.junit.Before;
-import org.junit.BeforeClass;
-import org.junit.Test;
+import org.junit.jupiter.api.AfterEach;
+import org.junit.jupiter.api.AfterAll;
+import org.junit.jupiter.api.Assertions;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.Test;
 
 import java.util.UUID;
 
-import static com.jayway.restassured.RestAssured.given;
-import static javax.ws.rs.core.Response.Status.OK;
-import static junit.framework.Assert.assertEquals;
-import static junit.framework.Assert.fail;
+import static io.restassured.RestAssured.given;
+import static jakarta.ws.rs.core.Response.Status.OK;
 import static org.hamcrest.CoreMatchers.equalTo;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.fail;
 
 public class FileMetadataIT {
 
@@ -32,12 +33,12 @@ public class FileMetadataIT {
     private static int dsId;
     private static int dsIdFirst;
 
-    @BeforeClass
+    @BeforeAll
     public static void setUpClass() {
         RestAssured.baseURI = UtilIT.getRestAssuredBaseUri();
     }
 
-    @Before
+    @BeforeEach
     public void setUpDataverse() {
         try {
             // create random test name
@@ -79,17 +80,16 @@ public void setUpDataverse() {
                     .then().assertThat().statusCode(201);
             System.out.println("DATAVERSE: " + RestAssured.baseURI + "/dataverse/" + testName);
         } catch (Exception e) {
-            System.out.println("Error setting up test dataverse: " + e.getMessage());
-            fail();
+            fail("Error setting up test dataverse: " + e.getMessage(), e);
         }
     }
 
-    @AfterClass
+    @AfterAll
     public static void tearDownClass() {
         RestAssured.reset();
     }
 
-    @After
+    @AfterEach
     public void tearDownDataverse() {
         try {
             // delete dataset
@@ -165,9 +165,7 @@ public void testJsonParserWithDirectoryLabels() {
                     .statusCode(200);
 
         } catch (Exception e) {
-            System.out.println("Error testJsonParserWithDirectoryLabels: " + e.getMessage());
-            e.printStackTrace();
-            fail();
+            fail("Error testJsonParserWithDirectoryLabels: " + e.getMessage(), e);
         }
     }
 
diff --git a/src/test/java/edu/harvard/iq/dataverse/api/FileTypeDetectionIT.java b/src/test/java/edu/harvard/iq/dataverse/api/FileTypeDetectionIT.java
index 10ac9192205..989688b0af6 100644
--- a/src/test/java/edu/harvard/iq/dataverse/api/FileTypeDetectionIT.java
+++ b/src/test/java/edu/harvard/iq/dataverse/api/FileTypeDetectionIT.java
@@ -1,21 +1,21 @@
 package edu.harvard.iq.dataverse.api;
 
-import com.jayway.restassured.RestAssured;
-import com.jayway.restassured.path.json.JsonPath;
-import com.jayway.restassured.response.Response;
-import javax.json.Json;
-import javax.json.JsonObjectBuilder;
-import static javax.ws.rs.core.Response.Status.CREATED;
-import static javax.ws.rs.core.Response.Status.OK;
-import static javax.ws.rs.core.Response.Status.UNAUTHORIZED;
+import io.restassured.RestAssured;
+import io.restassured.path.json.JsonPath;
+import io.restassured.response.Response;
+import jakarta.json.Json;
+import jakarta.json.JsonObjectBuilder;
+import static jakarta.ws.rs.core.Response.Status.CREATED;
+import static jakarta.ws.rs.core.Response.Status.OK;
+import static jakarta.ws.rs.core.Response.Status.UNAUTHORIZED;
 import static org.hamcrest.CoreMatchers.equalTo;
 import static org.hamcrest.CoreMatchers.nullValue;
-import org.junit.BeforeClass;
-import org.junit.Test;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.Test;
 
 public class FileTypeDetectionIT {
 
-    @BeforeClass
+    @BeforeAll
     public static void setUp() {
         RestAssured.baseURI = UtilIT.getRestAssuredBaseUri();
     }
diff --git a/src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java b/src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java
index 5321159d1fb..d0f20a8642b 100644
--- a/src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java
+++ b/src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java
@@ -1,54 +1,62 @@
 package edu.harvard.iq.dataverse.api;
 
-import com.jayway.restassured.RestAssured;
-import com.jayway.restassured.response.Response;
+import io.restassured.RestAssured;
+import io.restassured.response.Response;
 import java.util.logging.Logger;
 
 import edu.harvard.iq.dataverse.api.auth.ApiKeyAuthMechanism;
-import org.junit.Test;
-import org.junit.BeforeClass;
-import com.jayway.restassured.path.json.JsonPath;
-import static com.jayway.restassured.path.json.JsonPath.with;
-import com.jayway.restassured.path.xml.XmlPath;
-import static edu.harvard.iq.dataverse.api.AccessIT.apiToken;
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.BeforeAll;
+import io.restassured.path.json.JsonPath;
+import static io.restassured.path.json.JsonPath.with;
+import io.restassured.path.xml.XmlPath;
 import edu.harvard.iq.dataverse.settings.SettingsServiceBean;
 import edu.harvard.iq.dataverse.util.BundleUtil;
-import edu.harvard.iq.dataverse.util.StringUtil;
 import edu.harvard.iq.dataverse.util.SystemConfig;
 import java.io.File;
 import java.io.IOException;
 
 import static java.lang.Thread.sleep;
-import java.math.BigDecimal;
+
 import java.nio.file.Path;
 import java.nio.file.Paths;
 import java.text.MessageFormat;
 import java.util.Arrays;
 import java.util.Collections;
 import java.util.Map;
-import java.util.ResourceBundle;
-import javax.json.Json;
-import javax.json.JsonObjectBuilder;
 
-import static javax.ws.rs.core.Response.Status.*;
-import static junit.framework.Assert.assertEquals;
+import jakarta.json.Json;
+import jakarta.json.JsonObjectBuilder;
+
+import static jakarta.ws.rs.core.Response.Status.*;
 import org.hamcrest.CoreMatchers;
 import static org.hamcrest.CoreMatchers.equalTo;
 import static org.hamcrest.CoreMatchers.startsWith;
 import static org.hamcrest.CoreMatchers.nullValue;
 import org.hamcrest.Matchers;
+import org.junit.jupiter.api.AfterAll;
 
-import static org.junit.Assert.assertNotNull;
-import static org.junit.Assert.assertNull;
-import static org.junit.Assert.assertTrue;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertNotNull;
+import static org.junit.jupiter.api.Assertions.assertNull;
+import static org.junit.jupiter.api.Assertions.assertTrue;
 
 public class FilesIT {
 
     private static final Logger logger = Logger.getLogger(FilesIT.class.getCanonicalName());
 
-    @BeforeClass
+    @BeforeAll
     public static void setUpClass() {
         RestAssured.baseURI = UtilIT.getRestAssuredBaseUri();
+
+        Response removePublicInstall = UtilIT.deleteSetting(SettingsServiceBean.Key.PublicInstall);
+        removePublicInstall.then().assertThat().statusCode(200);
+
+    }
+
+    @AfterAll
+    public static void tearDownClass() {
+        UtilIT.deleteSetting(SettingsServiceBean.Key.PublicInstall);
     }
 
     /**
@@ -504,7 +512,7 @@ public void test_006_ReplaceFileGoodTabular() throws InterruptedException {
 
         // give file time to ingest
        // sleep(10000);
-       assertTrue("Failed test if Ingest Lock exceeds max duration " + pathToFile , UtilIT.sleepForLock(datasetId.longValue(), "Ingest", apiToken, UtilIT.MAXIMUM_INGEST_LOCK_DURATION));
+       assertTrue(UtilIT.sleepForLock(datasetId.longValue(), "Ingest", apiToken, UtilIT.MAXIMUM_INGEST_LOCK_DURATION), "Failed test if Ingest Lock exceeds max duration " + pathToFile);
 
         Response ddi = UtilIT.getFileMetadata(origFileId.toString(), "ddi", apiToken);
 //        ddi.prettyPrint();
@@ -1097,6 +1105,9 @@ public void testAccessFacet() {
         msg("Add initial file");
         String pathToFile = "src/main/webapp/resources/images/dataverseproject.png";
         Response addResponse = UtilIT.uploadFileViaNative(datasetId.toString(), pathToFile, apiToken);
+        
+        // Wait a little while for the index to pick up the file, otherwise timing issue with searching for it.
+        UtilIT.sleepForReindex(datasetId.toString(), apiToken, 4);
 
         String successMsgAdd = BundleUtil.getStringFromBundle("file.addreplace.success.add");
 
@@ -1107,9 +1118,9 @@ public void testAccessFacet() {
 
         long fileId = JsonPath.from(addResponse.body().asString()).getLong("data.files[0].dataFile.id");
 
-        Response searchShouldFindNothingBecauseUnpublished = UtilIT.search("id:datafile_" + fileId + "_draft", apiToken);
-        searchShouldFindNothingBecauseUnpublished.prettyPrint();
-        searchShouldFindNothingBecauseUnpublished.then().assertThat()
+        Response searchShouldFindBecauseAuthorApiTokenSupplied = UtilIT.search("id:datafile_" + fileId + "_draft", apiToken);
+        searchShouldFindBecauseAuthorApiTokenSupplied.prettyPrint();
+        searchShouldFindBecauseAuthorApiTokenSupplied.then().assertThat()
                 .body("data.total_count", equalTo(1))
                 .statusCode(OK.getStatusCode());
 
@@ -1205,7 +1216,7 @@ public void testUningestFileViaApi() throws InterruptedException {
         assertNotNull(origFileId);    // If checkOut fails, display message
        // sleep(10000);
         
-        assertTrue("Failed test if Ingest Lock exceeds max duration " + pathToFile , UtilIT.sleepForLock(datasetId.longValue(), "Ingest", apiToken, UtilIT.MAXIMUM_INGEST_LOCK_DURATION));
+        assertTrue(UtilIT.sleepForLock(datasetId.longValue(), "Ingest", apiToken, UtilIT.MAXIMUM_INGEST_LOCK_DURATION), "Failed test if Ingest Lock exceeds max duration " + pathToFile);
         Response uningestFileResponse = UtilIT.uningestFile(origFileId, apiToken);
         assertEquals(200, uningestFileResponse.getStatusCode());       
     }
@@ -1239,7 +1250,7 @@ public void testFileMetaDataGetUpdateRoundTrip() throws InterruptedException {
         Long origFileId = JsonPath.from(addResponse.body().asString()).getLong("data.files[0].dataFile.id");
         
         //sleep(2000); //ensure tsv is consumed
-        assertTrue("Failed test if Ingest Lock exceeds max duration " + pathToFile , UtilIT.sleepForLock(datasetId.longValue(), "Ingest", apiToken, UtilIT.MAXIMUM_INGEST_LOCK_DURATION));
+        assertTrue(UtilIT.sleepForLock(datasetId.longValue(), "Ingest", apiToken, UtilIT.MAXIMUM_INGEST_LOCK_DURATION), "Failed test if Ingest Lock exceeds max duration " + pathToFile);
         msg("Publish dataverse and dataset");
         Response publishDataversetResp = UtilIT.publishDataverseViaSword(dataverseAlias, apiToken);
         publishDataversetResp.then().assertThat()
@@ -1337,7 +1348,7 @@ public void testDataSizeInDataverse() throws InterruptedException {
                 .statusCode(OK.getStatusCode());
         
         // wait for it to ingest... 
-        assertTrue("Failed test if Ingest Lock exceeds max duration " + pathToFile , UtilIT.sleepForLock(datasetId.longValue(), "Ingest", apiToken, 5));
+        assertTrue(UtilIT.sleepForLock(datasetId.longValue(), "Ingest", apiToken, 5), "Failed test if Ingest Lock exceeds max duration " + pathToFile);
      //   sleep(10000);
      
         Response publishDataversetResp = UtilIT.publishDataverseViaSword(dataverseAlias, apiToken);
@@ -1401,8 +1412,8 @@ public void testGetFileInfo() {
         createUser = UtilIT.createRandomUser();
         String apiTokenRegular = UtilIT.getApiTokenFromResponse(createUser);
 
-        msg("Add tabular file");
-        String pathToFile = "scripts/search/data/tabular/stata13-auto-withstrls.dta";
+        msg("Add a non-tabular file");
+        String pathToFile = "scripts/search/data/binary/trees.png";
         Response addResponse = UtilIT.uploadFileViaNative(datasetId.toString(), pathToFile, apiToken);
 
         String dataFileId = addResponse.getBody().jsonPath().getString("data.files[0].dataFile.id");
@@ -1414,10 +1425,12 @@ public void testGetFileInfo() {
 
         getFileDataResponse.prettyPrint();
         getFileDataResponse.then().assertThat()
-                .body("data.label", equalTo("stata13-auto-withstrls.dta"))
-                .body("data.dataFile.filename", equalTo("stata13-auto-withstrls.dta"))
+                .body("data.label", equalTo("trees.png"))
+                .body("data.dataFile.filename", equalTo("trees.png"))
+                .body("data.dataFile.contentType", equalTo("image/png"))
+                .body("data.dataFile.filesize", equalTo(8361))
                 .statusCode(OK.getStatusCode());
-
+        
         getFileDataResponse = UtilIT.getFileData(dataFileId, apiTokenRegular);
         getFileDataResponse.then().assertThat()
                 .statusCode(BAD_REQUEST.getStatusCode());
@@ -1479,7 +1492,7 @@ public void testValidateDDI_issue6027() throws InterruptedException {
                 .statusCode(OK.getStatusCode());
 
         // give file time to ingest
-        assertTrue("Failed test if Ingest Lock exceeds max duration " + pathToFile , UtilIT.sleepForLock(datasetId.longValue(), "Ingest", apiToken, UtilIT.MAXIMUM_INGEST_LOCK_DURATION));
+        assertTrue(UtilIT.sleepForLock(datasetId.longValue(), "Ingest", apiToken, UtilIT.MAXIMUM_INGEST_LOCK_DURATION), "Failed test if Ingest Lock exceeds max duration " + pathToFile);
        // sleep(10000);
 
         Response ddi = UtilIT.getFileMetadata(origFileId.toString(), "ddi", apiToken);
@@ -1734,7 +1747,7 @@ public void testRange() throws IOException {
 
         Integer fileIdCsv = JsonPath.from(uploadFileCsv.body().asString()).getInt("data.files[0].dataFile.id");
 
-        assertTrue("Failed test if Ingest Lock exceeds max duration " + pathToCsv, UtilIT.sleepForLock(datasetId.longValue(), "Ingest", authorApiToken, UtilIT.MAXIMUM_INGEST_LOCK_DURATION));
+        assertTrue(UtilIT.sleepForLock(datasetId.longValue(), "Ingest", authorApiToken, UtilIT.MAXIMUM_INGEST_LOCK_DURATION), "Failed test if Ingest Lock exceeds max duration " + pathToCsv);
 
         // Just the tabular file, not the original, no byte range. Vanilla.
         Response downloadFileNoArgs = UtilIT.downloadFile(fileIdCsv, null, null, null, authorApiToken);
@@ -1867,7 +1880,7 @@ public void testAddFileToDatasetSkipTabIngest() throws IOException, InterruptedE
         logger.info(r.prettyPrint());
         assertEquals(200, r.getStatusCode());
 
-        assertTrue("Failed test if Ingest Lock exceeds max duration " + pathToFile, UtilIT.sleepForLock(datasetIdInt, "Ingest", apiToken, UtilIT.MAXIMUM_INGEST_LOCK_DURATION));
+        assertTrue(UtilIT.sleepForLock(datasetIdInt, "Ingest", apiToken, UtilIT.MAXIMUM_INGEST_LOCK_DURATION), "Failed test if Ingest Lock exceeds max duration " + pathToFile);
 
         Long dataFileId = JsonPath.from(r.body().asString()).getLong("data.files[0].dataFile.id");
         Response fileMeta = UtilIT.getDataFileMetadataDraft(dataFileId, apiToken);
@@ -1880,7 +1893,7 @@ public void testAddFileToDatasetSkipTabIngest() throws IOException, InterruptedE
         logger.info(rTabIngest.prettyPrint());
         assertEquals(200, rTabIngest.getStatusCode());
 
-        assertTrue("Failed test if Ingest Lock exceeds max duration " + pathToFile, UtilIT.sleepForLock(datasetIdInt, "Ingest", apiToken, UtilIT.MAXIMUM_INGEST_LOCK_DURATION));
+        assertTrue(UtilIT.sleepForLock(datasetIdInt, "Ingest", apiToken, UtilIT.MAXIMUM_INGEST_LOCK_DURATION), "Failed test if Ingest Lock exceeds max duration " + pathToFile);
 
         Long ingDataFileId = JsonPath.from(rTabIngest.body().asString()).getLong("data.files[0].dataFile.id");
         Response ingFileMeta = UtilIT.getDataFileMetadataDraft(ingDataFileId, apiToken);
@@ -2094,8 +2107,8 @@ public void testFilePIDsBehavior() {
             fileInfoResponseString = fileInfoResponse.body().asString();
             msg(fileInfoResponseString);
 
-            org.junit.Assert.assertEquals("The file was NOT supposed to be issued a PID", "",
-                    JsonPath.from(fileInfoResponseString).getString("data.dataFile.persistentId"));
+            assertEquals("", JsonPath.from(fileInfoResponseString).getString("data.dataFile.persistentId"),
+                "The file was NOT supposed to be issued a PID");
         } finally {
             UtilIT.deleteSetting(SettingsServiceBean.Key.FilePIDsEnabled);
             UtilIT.deleteSetting(SettingsServiceBean.Key.AllowEnablingFilePIDsPerCollection);
diff --git a/src/test/java/edu/harvard/iq/dataverse/api/FitsIT.java b/src/test/java/edu/harvard/iq/dataverse/api/FitsIT.java
index b154205ce2d..e788efc9c87 100644
--- a/src/test/java/edu/harvard/iq/dataverse/api/FitsIT.java
+++ b/src/test/java/edu/harvard/iq/dataverse/api/FitsIT.java
@@ -1,23 +1,23 @@
 package edu.harvard.iq.dataverse.api;
 
-import com.jayway.restassured.RestAssured;
-import static com.jayway.restassured.path.json.JsonPath.with;
-import com.jayway.restassured.response.Response;
+import io.restassured.RestAssured;
+import static io.restassured.path.json.JsonPath.with;
+import io.restassured.response.Response;
 import java.io.IOException;
 import java.util.List;
 import java.util.Map;
-import javax.json.Json;
-import javax.json.JsonObject;
-import static javax.ws.rs.core.Response.Status.CREATED;
-import static javax.ws.rs.core.Response.Status.OK;
+import jakarta.json.Json;
+import jakarta.json.JsonObject;
+import static jakarta.ws.rs.core.Response.Status.CREATED;
+import static jakarta.ws.rs.core.Response.Status.OK;
 import static org.hamcrest.CoreMatchers.equalTo;
-import static org.junit.Assert.assertTrue;
-import org.junit.BeforeClass;
-import org.junit.Test;
+import static org.junit.jupiter.api.Assertions.assertTrue;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.Test;
 
 public class FitsIT {
 
-    @BeforeClass
+    @BeforeAll
     public static void setUp() {
         RestAssured.baseURI = UtilIT.getRestAssuredBaseUri();
     }
@@ -60,6 +60,7 @@ public void testAstroFieldsFromFits() throws IOException {
         getJson.prettyPrint();
         getJson.then().assertThat()
                 .statusCode(OK.getStatusCode())
+                .body("data.latestVersion.files[0].description", equalTo("FITS file, 2 HDUs total:\nThe primary HDU; 1 Table HDU(s) 1 Image HDU(s); \nThe following recognized metadata keys have been found in the FITS file:\nCRVAL2; NAXIS; INSTRUME; NAXIS1; NAXIS0; EXPTIME; CD1_1; CRVAL1; TARGNAME; DATE-OBS; \n"))
                 .body("data.latestVersion.metadataBlocks.astrophysics.fields[0].value[0]", equalTo("Image"));
 
         // a bit more precise than the check for "Image" above (but annoyingly fiddly)
diff --git a/src/test/java/edu/harvard/iq/dataverse/api/HarvestingClientsIT.java b/src/test/java/edu/harvard/iq/dataverse/api/HarvestingClientsIT.java
index a470bd8fb27..d5388e510d2 100644
--- a/src/test/java/edu/harvard/iq/dataverse/api/HarvestingClientsIT.java
+++ b/src/test/java/edu/harvard/iq/dataverse/api/HarvestingClientsIT.java
@@ -1,21 +1,23 @@
 package edu.harvard.iq.dataverse.api;
 
 import java.util.logging.Logger;
-import java.util.logging.Level;
-import com.jayway.restassured.RestAssured;
-import static com.jayway.restassured.RestAssured.given;
-import com.jayway.restassured.path.json.JsonPath;
-import org.junit.Test;
-import com.jayway.restassured.response.Response;
-import static javax.ws.rs.core.Response.Status.CREATED;
-import static javax.ws.rs.core.Response.Status.UNAUTHORIZED;
-import static javax.ws.rs.core.Response.Status.ACCEPTED;
-import static javax.ws.rs.core.Response.Status.OK;
+
+import org.junit.jupiter.api.Test;
+
+import io.restassured.RestAssured;
+import static io.restassured.RestAssured.given;
+import io.restassured.path.json.JsonPath;
+import io.restassured.response.Response;
+
+import static jakarta.ws.rs.core.Response.Status.CREATED;
+import static jakarta.ws.rs.core.Response.Status.UNAUTHORIZED;
+import static jakarta.ws.rs.core.Response.Status.ACCEPTED;
+import static jakarta.ws.rs.core.Response.Status.OK;
 import static org.hamcrest.CoreMatchers.equalTo;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertNotNull;
-import static org.junit.Assert.assertTrue;
-import org.junit.BeforeClass;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.*;
+import static org.junit.jupiter.api.Assertions.assertTrue;
+import org.junit.jupiter.api.BeforeAll;
 
 /**
  * This class tests Harvesting Client functionality. 
@@ -41,7 +43,7 @@ public class HarvestingClientsIT {
     private static String adminUserAPIKey;
     private static String harvestCollectionAlias; 
     
-    @BeforeClass
+    @BeforeAll
     public static void setUpClass() {
         RestAssured.baseURI = UtilIT.getRestAssuredBaseUri();
         
@@ -214,7 +216,7 @@ public void testHarvestingClientRun()  throws InterruptedException {
         
             assertEquals(OK.getStatusCode(), getClientResponse.getStatusCode());
             JsonPath responseJsonPath = getClientResponse.body().jsonPath();
-            assertNotNull("Invalid JSON in GET client response", responseJsonPath);
+            assertNotNull(responseJsonPath, "Invalid JSON in GET client response");
             assertEquals(ApiConstants.STATUS_OK, responseJsonPath.getString("status"));
             
             String clientStatus = responseJsonPath.getString("data.status");
@@ -228,10 +230,10 @@ public void testHarvestingClientRun()  throws InterruptedException {
                         + getClientResponse.prettyPrint());
                 // Check the values in the response:
                 // a) Confirm that the harvest has completed: 
-                assertEquals("Unexpected client status: "+clientStatus, "inActive", clientStatus);
+                assertEquals("inActive", clientStatus, "Unexpected client status: "+clientStatus);
                 
                 // b) Confirm that it has actually succeeded:
-                assertEquals("Last harvest not reported a success (took "+i+" seconds)", "SUCCESS", responseJsonPath.getString("data.lastResult"));
+                assertEquals("SUCCESS", responseJsonPath.getString("data.lastResult"), "Last harvest not reported a success (took "+i+" seconds)");
                 String harvestTimeStamp = responseJsonPath.getString("data.lastHarvest");
                 assertNotNull(harvestTimeStamp); 
                 
diff --git a/src/test/java/edu/harvard/iq/dataverse/api/HarvestingServerIT.java b/src/test/java/edu/harvard/iq/dataverse/api/HarvestingServerIT.java
index 276909d1c23..e02964ef28f 100644
--- a/src/test/java/edu/harvard/iq/dataverse/api/HarvestingServerIT.java
+++ b/src/test/java/edu/harvard/iq/dataverse/api/HarvestingServerIT.java
@@ -2,26 +2,29 @@
 
 import java.util.logging.Level;
 import java.util.logging.Logger;
-import com.jayway.restassured.RestAssured;
-import static com.jayway.restassured.RestAssured.given;
-import org.junit.BeforeClass;
-import org.junit.AfterClass;
-import org.junit.Test;
+import io.restassured.RestAssured;
+import static io.restassured.RestAssured.given;
+
+import org.junit.jupiter.api.AfterAll;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.Test;
 import edu.harvard.iq.dataverse.settings.SettingsServiceBean;
-import com.jayway.restassured.response.Response;
-import com.jayway.restassured.path.xml.XmlPath;
-import com.jayway.restassured.path.xml.element.Node;
+import io.restassured.response.Response;
+import io.restassured.path.xml.XmlPath;
+import io.restassured.path.xml.element.Node;
+
 import java.util.ArrayList;
 import java.util.Collections;
-import static javax.ws.rs.core.Response.Status.OK;
+import static jakarta.ws.rs.core.Response.Status.OK;
 import static org.hamcrest.CoreMatchers.equalTo;
 import java.util.List;
 import java.util.Set;
 import java.util.HashSet;
-import static org.junit.Assert.assertFalse;
-import static org.junit.Assert.assertNotNull;
-import static org.junit.Assert.assertTrue;
-import static org.junit.Assert.assertEquals;
+
+import static org.junit.jupiter.api.Assertions.assertFalse;
+import static org.junit.jupiter.api.Assertions.assertNotNull;
+import static org.junit.jupiter.api.Assertions.assertTrue;
+import static org.junit.jupiter.api.Assertions.assertEquals;
 
 /**
  * Tests for the Harvesting Server functionality
@@ -38,7 +41,7 @@ public class HarvestingServerIT {
     private static String singleSetDatasetPersistentId;
     private static List<String> extraDatasetsIdentifiers = new ArrayList<>();
 
-    @BeforeClass
+    @BeforeAll
     public static void setUpClass() {
         RestAssured.baseURI = UtilIT.getRestAssuredBaseUri();
 	// enable harvesting server
@@ -53,7 +56,7 @@ public static void setUpClass() {
         
     }
 
-    @AfterClass
+    @AfterAll
     public static void afterClass() {
 	// disable harvesting server (default value)
 	Response enableHarvestingServerResponse = UtilIT.setSetting(SettingsServiceBean.Key.OAIServerEnabled,"false");
@@ -373,10 +376,10 @@ public void testSetEditAPIandOAIlistSets() {
         List<Node> listSets = responseXmlPath.getList("OAI-PMH.ListSets.set.list().findAll{it.setName=='"+setName+"'}", Node.class);
         
         // 2a. Confirm that our set is listed:
-        assertNotNull("Unexpected response from ListSets", listSets);
-        assertTrue("Newly-created set isn't properly listed by the OAI server", listSets.size() == 1);
+        assertNotNull(listSets, "Unexpected response from ListSets");
+        assertEquals(1, listSets.size(), "Newly-created set isn't properly listed by the OAI server");
         // 2b. Confirm that the set entry contains the updated description: 
-        assertEquals("Incorrect description in the ListSets entry", newDescription, listSets.get(0).getPath("setDescription.metadata.element.field", String.class));
+        assertEquals(newDescription, listSets.get(0).getPath("setDescription.metadata.element.field", String.class), "Incorrect description in the ListSets entry");
         
         // ok, the xml record looks good! 
 
@@ -603,12 +606,11 @@ public void testMultiRecordOaiSet() throws InterruptedException {
 
         // Validate the payload of the ListIdentifiers response:
         // 1a) There should be 2 items listed:
-        assertEquals("Wrong number of items on the first ListIdentifiers page",
-                2, ret.size());
+        assertEquals(2, ret.size(), "Wrong number of items on the first ListIdentifiers page");
         
         // 1b) The response contains a resumptionToken for the next page of items:
         String resumptionToken = responseXmlPath.getString("OAI-PMH.ListIdentifiers.resumptionToken");
-        assertNotNull("No resumption token in the ListIdentifiers response (has the jvm option dataverse.oai.server.maxidentifiers been configured?)", resumptionToken);
+        assertNotNull(resumptionToken, "No resumption token in the ListIdentifiers response (has the jvm option dataverse.oai.server.maxidentifiers been configured?)");
         
         // 1c) The total number of items in the set (5) is listed correctly:
         assertEquals(5, responseXmlPath.getInt("OAI-PMH.ListIdentifiers.resumptionToken.@completeListSize"));
@@ -650,12 +652,11 @@ public void testMultiRecordOaiSet() throws InterruptedException {
         
         // Validate the payload of the ListIdentifiers response:
         // 2a) There should still be 2 items listed:
-        assertEquals("Wrong number of items on the second ListIdentifiers page",
-                2, ret.size());
+        assertEquals(2, ret.size(), "Wrong number of items on the second ListIdentifiers page");
         
         // 2b) The response should contain a resumptionToken for the next page of items:
         resumptionToken = responseXmlPath.getString("OAI-PMH.ListIdentifiers.resumptionToken");
-        assertNotNull("No resumption token in the ListIdentifiers response", resumptionToken);
+        assertNotNull(resumptionToken, "No resumption token in the ListIdentifiers response");
         
         // 2c) The total number of items in the set (5) is listed correctly:
         assertEquals(5, responseXmlPath.getInt("OAI-PMH.ListIdentifiers.resumptionToken.@completeListSize"));
@@ -690,13 +691,12 @@ public void testMultiRecordOaiSet() throws InterruptedException {
         
         // Validate the payload of the ListIdentifiers response:
         // 3a) There should be only 1 item listed:
-        assertEquals("Wrong number of items on the final ListIdentifiers page", 
-                1, ret.size());
+        assertEquals(1, ret.size(), "Wrong number of items on the final ListIdentifiers page");
         
         // 3b) The response contains a resumptionToken for the next page of items:
         resumptionToken = responseXmlPath.getString("OAI-PMH.ListIdentifiers.resumptionToken");
-        assertNotNull("No resumption token in the final ListIdentifiers response", resumptionToken);
-        assertTrue("Non-empty resumption token in the final ListIdentifiers response", "".equals(resumptionToken));
+        assertNotNull(resumptionToken, "No resumption token in the final ListIdentifiers response");
+        assertEquals("", resumptionToken, "Non-empty resumption token in the final ListIdentifiers response");
         
         // 3c) The total number of items in the set (5) is still listed correctly:
         assertEquals(5, responseXmlPath.getInt("OAI-PMH.ListIdentifiers.resumptionToken.@completeListSize"));
@@ -717,8 +717,7 @@ public void testMultiRecordOaiSet() throws InterruptedException {
             allDatasetsListed = allDatasetsListed && persistentIdsInListIdentifiers.contains(persistentId); 
         }
         
-        assertTrue("Control datasets not properly listed in the paged ListIdentifiers response", 
-                allDatasetsListed);
+        assertTrue(allDatasetsListed, "Control datasets not properly listed in the paged ListIdentifiers response");
         
         // OK, it is safe to assume ListIdentifiers works as it should in page mode.
         
@@ -743,12 +742,11 @@ public void testMultiRecordOaiSet() throws InterruptedException {
         
         // Validate the payload of the ListRecords response:
         // 4a) There should be 2 items listed:
-        assertEquals("Wrong number of items on the first ListRecords page",
-                2, ret.size());
+        assertEquals(2, ret.size(), "Wrong number of items on the first ListRecords page");
         
         // 4b) The response contains a resumptionToken for the next page of items:
         resumptionToken = responseXmlPath.getString("OAI-PMH.ListRecords.resumptionToken");
-        assertNotNull("No resumption token in the ListRecords response (has the jvm option dataverse.oai.server.maxrecords been configured?)", resumptionToken);
+        assertNotNull(resumptionToken, "No resumption token in the ListRecords response (has the jvm option dataverse.oai.server.maxrecords been configured?)");
         
         // 4c) The total number of items in the set (5) is listed correctly:
         assertEquals(5, responseXmlPath.getInt("OAI-PMH.ListRecords.resumptionToken.@completeListSize"));
@@ -784,12 +782,11 @@ public void testMultiRecordOaiSet() throws InterruptedException {
         
         // Validate the payload of the ListRecords response:
         // 4a) There should still be 2 items listed:
-        assertEquals("Wrong number of items on the second ListRecords page",
-                2, ret.size());
+        assertEquals(2, ret.size(), "Wrong number of items on the second ListRecords page");
         
         // 4b) The response should contain a resumptionToken for the next page of items:
         resumptionToken = responseXmlPath.getString("OAI-PMH.ListRecords.resumptionToken");
-        assertNotNull("No resumption token in the ListRecords response", resumptionToken);
+        assertNotNull(resumptionToken, "No resumption token in the ListRecords response");
         
         // 4c) The total number of items in the set (5) is listed correctly:
         assertEquals(5, responseXmlPath.getInt("OAI-PMH.ListRecords.resumptionToken.@completeListSize"));
@@ -824,13 +821,12 @@ public void testMultiRecordOaiSet() throws InterruptedException {
         
         // Validate the payload of the ListRecords response:
         // 6a) There should be only 1 item listed:
-        assertEquals("Wrong number of items on the final ListRecords page", 
-                1, ret.size());
+        assertEquals(1, ret.size(), "Wrong number of items on the final ListRecords page");
         
         // 6b) The response contains a resumptionToken for the next page of items:
         resumptionToken = responseXmlPath.getString("OAI-PMH.ListRecords.resumptionToken");
-        assertNotNull("No resumption token in the final ListRecords response", resumptionToken);
-        assertTrue("Non-empty resumption token in the final ListRecords response", "".equals(resumptionToken));
+        assertNotNull(resumptionToken, "No resumption token in the final ListRecords response");
+        assertEquals("", resumptionToken, "Non-empty resumption token in the final ListRecords response");
         
         // 6c) The total number of items in the set (5) is still listed correctly:
         assertEquals(5, responseXmlPath.getInt("OAI-PMH.ListRecords.resumptionToken.@completeListSize"));
@@ -851,8 +847,7 @@ public void testMultiRecordOaiSet() throws InterruptedException {
             allDatasetsListed = allDatasetsListed && persistentIdsInListRecords.contains(persistentId); 
         }
         
-        assertTrue("Control datasets not properly listed in the paged ListRecords response", 
-                allDatasetsListed);
+        assertTrue(allDatasetsListed, "Control datasets not properly listed in the paged ListRecords response");
         
         // OK, it is safe to assume ListRecords works as it should in page mode
         // as well. 
@@ -863,7 +858,7 @@ public void testMultiRecordOaiSet() throws InterruptedException {
                 .header(UtilIT.API_TOKEN_HTTP_HEADER, adminUserAPIKey)
                 .delete(setPath);
         logger.info("deleteResponse.getStatusCode(): " + deleteResponse.getStatusCode());
-        assertEquals("Failed to delete the control multi-record set", 200, deleteResponse.getStatusCode());
+        assertEquals(200, deleteResponse.getStatusCode(), "Failed to delete the control multi-record set");
     }
     
     // TODO: 
diff --git a/src/test/java/edu/harvard/iq/dataverse/api/InReviewWorkflowIT.java b/src/test/java/edu/harvard/iq/dataverse/api/InReviewWorkflowIT.java
index 89aeaa6b2ee..307eef48773 100644
--- a/src/test/java/edu/harvard/iq/dataverse/api/InReviewWorkflowIT.java
+++ b/src/test/java/edu/harvard/iq/dataverse/api/InReviewWorkflowIT.java
@@ -1,31 +1,29 @@
 package edu.harvard.iq.dataverse.api;
 
-import com.jayway.restassured.RestAssured;
-import com.jayway.restassured.path.json.JsonPath;
-import com.jayway.restassured.path.xml.XmlPath;
-import com.jayway.restassured.response.Response;
-import static edu.harvard.iq.dataverse.api.AccessIT.apiToken;
+import io.restassured.RestAssured;
+import io.restassured.path.json.JsonPath;
+import io.restassured.path.xml.XmlPath;
+import io.restassured.response.Response;
 import edu.harvard.iq.dataverse.authorization.DataverseRole;
 import java.util.logging.Logger;
-import javax.json.Json;
-import javax.json.JsonObjectBuilder;
-import static javax.ws.rs.core.Response.Status.BAD_REQUEST;
-import static javax.ws.rs.core.Response.Status.CREATED;
-import static javax.ws.rs.core.Response.Status.FORBIDDEN;
-import static javax.ws.rs.core.Response.Status.OK;
-import static javax.ws.rs.core.Response.Status.UNAUTHORIZED;
-import static javax.ws.rs.core.Response.Status.NO_CONTENT;
+import jakarta.json.Json;
+import jakarta.json.JsonObjectBuilder;
+import static jakarta.ws.rs.core.Response.Status.BAD_REQUEST;
+import static jakarta.ws.rs.core.Response.Status.CREATED;
+import static jakarta.ws.rs.core.Response.Status.FORBIDDEN;
+import static jakarta.ws.rs.core.Response.Status.OK;
+import static jakarta.ws.rs.core.Response.Status.UNAUTHORIZED;
+import static jakarta.ws.rs.core.Response.Status.NO_CONTENT;
 import static org.hamcrest.CoreMatchers.equalTo;
-import org.junit.Assert;
-import static org.junit.Assert.assertTrue;
-import org.junit.BeforeClass;
-import org.junit.Test;
+import static org.junit.jupiter.api.Assertions.assertTrue;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.Test;
 
 public class InReviewWorkflowIT {
 
     private static final Logger logger = Logger.getLogger(DatasetsIT.class.getCanonicalName());
 
-    @BeforeClass
+    @BeforeAll
     public static void setUpClass() {
         RestAssured.baseURI = UtilIT.getRestAssuredBaseUri();
 
@@ -170,7 +168,7 @@ public void testCuratorSendsCommentsToAuthor() throws InterruptedException {
                 .statusCode(OK.getStatusCode());
         String citation = XmlPath.from(atomEntry.body().asString()).getString("bibliographicCitation");
         System.out.println("citation: " + citation);
-        Assert.assertTrue(citation.contains("A Better Title"));
+        assertTrue(citation.contains("A Better Title"));
 
         // The author tries to update the title while the dataset is in review via native.
         String pathToJsonFile = "doc/sphinx-guides/source/_static/api/dataset-update-metadata.json";
@@ -186,7 +184,7 @@ public void testCuratorSendsCommentsToAuthor() throws InterruptedException {
         String citationAuthorNative = XmlPath.from(atomEntryAuthorNative.body().asString()).getString("bibliographicCitation");
         System.out.println("citation: " + citationAuthorNative);
         // The author was unable to change the title.
-        Assert.assertTrue(citationAuthorNative.contains("A Better Title"));
+        assertTrue(citationAuthorNative.contains("A Better Title"));
 
         // The author remembers she forgot to add a file and tries to upload it while
         // the dataset is in review via native API but this fails.
@@ -239,7 +237,7 @@ public void testCuratorSendsCommentsToAuthor() throws InterruptedException {
                 // because the dataset is still locked when we try to edit it, 
                 // a few lines down. -- L.A. Oct. 2018  
                 // Changes to test for ingest lock and 3 seconds duration SEK 09/2019 #6128
-                assertTrue("Failed test if Ingest Lock exceeds max duration " + pathToFileThatGoesThroughIngest , UtilIT.sleepForLock(datasetId, "Ingest", curatorApiToken, UtilIT.MAXIMUM_INGEST_LOCK_DURATION));
+                assertTrue(UtilIT.sleepForLock(datasetId, "Ingest", curatorApiToken, UtilIT.MAXIMUM_INGEST_LOCK_DURATION), "Failed test if Ingest Lock exceeds max duration " + pathToFileThatGoesThroughIngest);
                // Thread.sleep(10000);
             }
         }
@@ -273,7 +271,7 @@ public void testCuratorSendsCommentsToAuthor() throws InterruptedException {
                 .statusCode(OK.getStatusCode());
         String citationCuratorNative = XmlPath.from(atomEntryCuratorNative.body().asString()).getString("bibliographicCitation");
         System.out.println("citation: " + citationCuratorNative);
-        Assert.assertTrue(citationCuratorNative.contains("newTitle"));
+        assertTrue(citationCuratorNative.contains("newTitle"));
         // END https://github.com/IQSS/dataverse/issues/4139
 
         // TODO: test where curator neglecting to leave a comment. Should fail with "reason for return" required.
diff --git a/src/test/java/edu/harvard/iq/dataverse/api/IndexIT.java b/src/test/java/edu/harvard/iq/dataverse/api/IndexIT.java
index 313c4c2cfaf..2d946b4012b 100644
--- a/src/test/java/edu/harvard/iq/dataverse/api/IndexIT.java
+++ b/src/test/java/edu/harvard/iq/dataverse/api/IndexIT.java
@@ -1,29 +1,29 @@
 package edu.harvard.iq.dataverse.api;
 
-import com.jayway.restassured.RestAssured;
-import static com.jayway.restassured.RestAssured.given;
-import com.jayway.restassured.path.json.JsonPath;
-import com.jayway.restassured.response.Response;
+import io.restassured.RestAssured;
+import static io.restassured.RestAssured.given;
+import io.restassured.path.json.JsonPath;
+import io.restassured.response.Response;
 import static edu.harvard.iq.dataverse.api.UtilIT.API_TOKEN_HTTP_HEADER;
 import edu.harvard.iq.dataverse.settings.SettingsServiceBean;
 import java.util.ArrayList;
 import java.util.logging.Logger;
-import static javax.ws.rs.core.Response.Status.CREATED;
-import static javax.ws.rs.core.Response.Status.NO_CONTENT;
-import org.junit.AfterClass;
-import org.junit.BeforeClass;
-import org.junit.Test;
-import static javax.ws.rs.core.Response.Status.OK;
-import static junit.framework.Assert.assertEquals;
+import static jakarta.ws.rs.core.Response.Status.CREATED;
+import static jakarta.ws.rs.core.Response.Status.NO_CONTENT;
+import org.junit.jupiter.api.AfterAll;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.Test;
+import static jakarta.ws.rs.core.Response.Status.OK;
+import static org.junit.jupiter.api.Assertions.assertEquals;
 import org.hamcrest.CoreMatchers;
 import static org.hamcrest.CoreMatchers.equalTo;
-import org.junit.After;
+import org.junit.jupiter.api.AfterEach;
 
 public class IndexIT {
 
     private static final Logger logger = Logger.getLogger(IndexIT.class.getCanonicalName());
 
-    @BeforeClass
+    @BeforeAll
     public static void setUpClass() {
 
         RestAssured.baseURI = UtilIT.getRestAssuredBaseUri();
@@ -100,11 +100,11 @@ public void testIndexStatus() {
       
     }
    
-    @After
+    @AfterEach
     public void tearDownDataverse() {
         }
 
-    @AfterClass
+    @AfterAll
     public static void cleanup() {
     }
 
diff --git a/src/test/java/edu/harvard/iq/dataverse/api/InfoIT.java b/src/test/java/edu/harvard/iq/dataverse/api/InfoIT.java
index 73a1171bf1b..142b979ef3c 100644
--- a/src/test/java/edu/harvard/iq/dataverse/api/InfoIT.java
+++ b/src/test/java/edu/harvard/iq/dataverse/api/InfoIT.java
@@ -1,9 +1,10 @@
 package edu.harvard.iq.dataverse.api;
 
-import static com.jayway.restassured.RestAssured.given;
-import com.jayway.restassured.response.Response;
+import static io.restassured.RestAssured.given;
+import io.restassured.response.Response;
 import edu.harvard.iq.dataverse.settings.SettingsServiceBean;
-import org.junit.Test;
+import org.junit.jupiter.api.Test;
+
 import static org.hamcrest.CoreMatchers.equalTo;
 import static org.hamcrest.CoreMatchers.notNullValue;
 
diff --git a/src/test/java/edu/harvard/iq/dataverse/api/InvalidCharactersIT.java b/src/test/java/edu/harvard/iq/dataverse/api/InvalidCharactersIT.java
index 2fb412ef1cc..2cd7942cb5f 100644
--- a/src/test/java/edu/harvard/iq/dataverse/api/InvalidCharactersIT.java
+++ b/src/test/java/edu/harvard/iq/dataverse/api/InvalidCharactersIT.java
@@ -1,22 +1,22 @@
 package edu.harvard.iq.dataverse.api;
 
-import com.jayway.restassured.RestAssured;
-import com.jayway.restassured.response.Response;
+import io.restassured.RestAssured;
+import io.restassured.response.Response;
 import java.io.File;
 import java.io.IOException;
 import java.nio.file.Path;
 import java.nio.file.Paths;
-import javax.json.Json;
-import javax.json.JsonObjectBuilder;
-import static javax.ws.rs.core.Response.Status.CREATED;
-import static javax.ws.rs.core.Response.Status.OK;
+import jakarta.json.Json;
+import jakarta.json.JsonObjectBuilder;
+import static jakarta.ws.rs.core.Response.Status.CREATED;
+import static jakarta.ws.rs.core.Response.Status.OK;
 import org.hamcrest.Matchers;
-import org.junit.BeforeClass;
-import org.junit.Test;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.Test;
 
 public class InvalidCharactersIT {
 
-    @BeforeClass
+    @BeforeAll
     public static void setUp() {
         RestAssured.baseURI = UtilIT.getRestAssuredBaseUri();
     }
diff --git a/src/test/java/edu/harvard/iq/dataverse/api/IpGroupsIT.java b/src/test/java/edu/harvard/iq/dataverse/api/IpGroupsIT.java
index 88b8a9fc458..1c7e7b05650 100644
--- a/src/test/java/edu/harvard/iq/dataverse/api/IpGroupsIT.java
+++ b/src/test/java/edu/harvard/iq/dataverse/api/IpGroupsIT.java
@@ -1,27 +1,27 @@
 package edu.harvard.iq.dataverse.api;
 
-import com.jayway.restassured.RestAssured;
-import com.jayway.restassured.path.json.JsonPath;
-import com.jayway.restassured.response.Response;
+import io.restassured.RestAssured;
+import io.restassured.path.json.JsonPath;
+import io.restassured.response.Response;
 import edu.harvard.iq.dataverse.authorization.DataverseRole;
 import java.util.ArrayList;
 import java.util.List;
 import java.util.logging.Logger;
-import javax.json.Json;
-import javax.json.JsonObjectBuilder;
-import static javax.ws.rs.core.Response.Status.CREATED;
-import static javax.ws.rs.core.Response.Status.FORBIDDEN;
-import static javax.ws.rs.core.Response.Status.OK;
-import static junit.framework.Assert.assertEquals;
+import jakarta.json.Json;
+import jakarta.json.JsonObjectBuilder;
+import static jakarta.ws.rs.core.Response.Status.CREATED;
+import static jakarta.ws.rs.core.Response.Status.FORBIDDEN;
+import static jakarta.ws.rs.core.Response.Status.OK;
+import static org.junit.jupiter.api.Assertions.assertEquals;
 import static org.hamcrest.CoreMatchers.equalTo;
-import org.junit.BeforeClass;
-import org.junit.Test;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.Test;
 
 public class IpGroupsIT {
 
     private static final Logger logger = Logger.getLogger(IpGroupsIT.class.getCanonicalName());
 
-    @BeforeClass
+    @BeforeAll
     public static void setUpClass() {
         RestAssured.baseURI = UtilIT.getRestAssuredBaseUri();
     }
diff --git a/src/test/java/edu/harvard/iq/dataverse/api/LazyRefTest.java b/src/test/java/edu/harvard/iq/dataverse/api/LazyRefTest.java
index 72cf6fae875..382a80a3493 100644
--- a/src/test/java/edu/harvard/iq/dataverse/api/LazyRefTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/api/LazyRefTest.java
@@ -3,8 +3,7 @@
  */
 package edu.harvard.iq.dataverse.api;
 
-import org.junit.Test;
-import static org.junit.Assert.*;
+import org.junit.jupiter.api.Test;
 
 /**
  *
diff --git a/src/test/java/edu/harvard/iq/dataverse/api/LicensesIT.java b/src/test/java/edu/harvard/iq/dataverse/api/LicensesIT.java
index d6bfdb96777..8d1af322cbd 100644
--- a/src/test/java/edu/harvard/iq/dataverse/api/LicensesIT.java
+++ b/src/test/java/edu/harvard/iq/dataverse/api/LicensesIT.java
@@ -1,44 +1,24 @@
 package edu.harvard.iq.dataverse.api;
 
-import com.jayway.restassured.RestAssured;
-import com.jayway.restassured.path.json.JsonPath;
-import com.jayway.restassured.response.Response;
-import edu.harvard.iq.dataverse.DataFile;
-import edu.harvard.iq.dataverse.authorization.providers.builtin.BuiltinAuthenticationProvider;
-import edu.harvard.iq.dataverse.authorization.providers.oauth2.impl.GitHubOAuth2AP;
-import edu.harvard.iq.dataverse.authorization.providers.oauth2.impl.OrcidOAuth2AP;
-import edu.harvard.iq.dataverse.settings.SettingsServiceBean;
+import io.restassured.RestAssured;
+import io.restassured.path.json.JsonPath;
+import io.restassured.response.Response;
 
-import java.io.IOException;
-import java.nio.file.Files;
-import java.nio.file.Paths;
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.List;
-import static javax.ws.rs.core.Response.Status.FORBIDDEN;
-import static javax.ws.rs.core.Response.Status.BAD_REQUEST;
-import org.junit.Test;
-import org.junit.BeforeClass;
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.BeforeAll;
 
-import java.util.Map;
-import java.util.UUID;
 import java.util.logging.Logger;
 
-import static javax.ws.rs.core.Response.Status.CREATED;
-import static javax.ws.rs.core.Response.Status.INTERNAL_SERVER_ERROR;
-import static javax.ws.rs.core.Response.Status.OK;
-import static javax.ws.rs.core.Response.Status.UNAUTHORIZED;
-import static org.junit.Assert.*;
-import static org.hamcrest.CoreMatchers.equalTo;
+import static jakarta.ws.rs.core.Response.Status.OK;
 import static org.hamcrest.CoreMatchers.notNullValue;
-import static org.junit.Assert.assertTrue;
-import org.junit.Ignore;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertTrue;
 
 public class LicensesIT {
 
     private static final Logger logger = Logger.getLogger(LicensesIT.class.getCanonicalName());
 
-    @BeforeClass
+    @BeforeAll
     public static void setUp() {
         RestAssured.baseURI = UtilIT.getRestAssuredBaseUri();
     }
diff --git a/src/test/java/edu/harvard/iq/dataverse/api/LinkIT.java b/src/test/java/edu/harvard/iq/dataverse/api/LinkIT.java
index 76e9b7d6bc8..907d3dec4bc 100644
--- a/src/test/java/edu/harvard/iq/dataverse/api/LinkIT.java
+++ b/src/test/java/edu/harvard/iq/dataverse/api/LinkIT.java
@@ -1,22 +1,22 @@
 package edu.harvard.iq.dataverse.api;
 
-import com.jayway.restassured.RestAssured;
-import com.jayway.restassured.path.json.JsonPath;
-import com.jayway.restassured.response.Response;
-import edu.harvard.iq.dataverse.util.BundleUtil;
+import io.restassured.RestAssured;
+import io.restassured.path.json.JsonPath;
+import io.restassured.response.Response;
+
 import java.util.logging.Logger;
-import static javax.ws.rs.core.Response.Status.CREATED;
-import static javax.ws.rs.core.Response.Status.FORBIDDEN;
-import static javax.ws.rs.core.Response.Status.OK;
+import static jakarta.ws.rs.core.Response.Status.CREATED;
+import static jakarta.ws.rs.core.Response.Status.FORBIDDEN;
+import static jakarta.ws.rs.core.Response.Status.OK;
 import static org.hamcrest.CoreMatchers.equalTo;
-import org.junit.BeforeClass;
-import org.junit.Test;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.Test;
 
 public class LinkIT {
 
     private static final Logger logger = Logger.getLogger(LinkIT.class.getCanonicalName());
 
-    @BeforeClass
+    @BeforeAll
     public static void setUpClass() {
         RestAssured.baseURI = UtilIT.getRestAssuredBaseUri();
     }
diff --git a/src/test/java/edu/harvard/iq/dataverse/api/LogoutIT.java b/src/test/java/edu/harvard/iq/dataverse/api/LogoutIT.java
index f07ce970914..53fa500a328 100644
--- a/src/test/java/edu/harvard/iq/dataverse/api/LogoutIT.java
+++ b/src/test/java/edu/harvard/iq/dataverse/api/LogoutIT.java
@@ -1,16 +1,16 @@
 package edu.harvard.iq.dataverse.api;
 
-import com.jayway.restassured.RestAssured;
-import com.jayway.restassured.response.Response;
-import org.junit.BeforeClass;
+import io.restassured.RestAssured;
+import io.restassured.response.Response;
+import org.junit.jupiter.api.BeforeAll;
 import org.junit.jupiter.api.Test;
 
-import static javax.ws.rs.core.Response.Status.INTERNAL_SERVER_ERROR;
+import static jakarta.ws.rs.core.Response.Status.INTERNAL_SERVER_ERROR;
 import static org.junit.jupiter.api.Assertions.assertEquals;
 
 public class LogoutIT {
 
-    @BeforeClass
+    @BeforeAll
     public static void setUpClass() {
         RestAssured.baseURI = UtilIT.getRestAssuredBaseUri();
     }
diff --git a/src/test/java/edu/harvard/iq/dataverse/api/MakeDataCountApiIT.java b/src/test/java/edu/harvard/iq/dataverse/api/MakeDataCountApiIT.java
index c210d471ce2..7a113fd4caa 100644
--- a/src/test/java/edu/harvard/iq/dataverse/api/MakeDataCountApiIT.java
+++ b/src/test/java/edu/harvard/iq/dataverse/api/MakeDataCountApiIT.java
@@ -1,21 +1,21 @@
 package edu.harvard.iq.dataverse.api;
 
-import com.jayway.restassured.RestAssured;
-import com.jayway.restassured.response.Response;
+import io.restassured.RestAssured;
+import io.restassured.response.Response;
 import java.io.File;
 import java.io.IOException;
-import static javax.ws.rs.core.Response.Status.CREATED;
-import static javax.ws.rs.core.Response.Status.OK;
-import static javax.ws.rs.core.Response.Status.BAD_REQUEST;
+import static jakarta.ws.rs.core.Response.Status.CREATED;
+import static jakarta.ws.rs.core.Response.Status.OK;
+import static jakarta.ws.rs.core.Response.Status.BAD_REQUEST;
 import org.apache.commons.io.FileUtils;
 import static org.hamcrest.CoreMatchers.equalTo;
-import org.junit.BeforeClass;
-import org.junit.Ignore;
-import org.junit.Test;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.Disabled;
+import org.junit.jupiter.api.Test;
 
 public class MakeDataCountApiIT {
 
-    @BeforeClass
+    @BeforeAll
     public static void setUpClass() {
         RestAssured.baseURI = UtilIT.getRestAssuredBaseUri();
     }
@@ -181,7 +181,7 @@ public void testMakeDataCountGetMetric() throws IOException {
      *
      * update dvobject set authority = '10.7910' where id = 10;
      */
-    @Ignore
+    @Disabled
     @Test
     public void testMakeDataCountDownloadCitation() {
         String idOrPersistentIdOfDataset = "doi:10.7910/DVN/HQZOOB";
diff --git a/src/test/java/edu/harvard/iq/dataverse/api/MetadataBlocksIT.java b/src/test/java/edu/harvard/iq/dataverse/api/MetadataBlocksIT.java
index c958c339daf..c301e158b4e 100644
--- a/src/test/java/edu/harvard/iq/dataverse/api/MetadataBlocksIT.java
+++ b/src/test/java/edu/harvard/iq/dataverse/api/MetadataBlocksIT.java
@@ -1,13 +1,13 @@
 package edu.harvard.iq.dataverse.api;
 
-import com.jayway.restassured.RestAssured;
-import com.jayway.restassured.response.Response;
+import io.restassured.RestAssured;
+import io.restassured.response.Response;
 import org.hamcrest.CoreMatchers;
 import org.junit.jupiter.api.BeforeAll;
 import org.junit.jupiter.api.Test;
 
-import static javax.ws.rs.core.Response.Status.CREATED;
-import static javax.ws.rs.core.Response.Status.OK;
+import static jakarta.ws.rs.core.Response.Status.CREATED;
+import static jakarta.ws.rs.core.Response.Status.OK;
 import static org.junit.jupiter.api.Assertions.assertEquals;
 import static org.junit.jupiter.api.Assumptions.assumeFalse;
 import static org.junit.jupiter.api.Assumptions.assumeTrue;
diff --git a/src/test/java/edu/harvard/iq/dataverse/api/MetricsIT.java b/src/test/java/edu/harvard/iq/dataverse/api/MetricsIT.java
index ca20799de90..e3328eefb4a 100644
--- a/src/test/java/edu/harvard/iq/dataverse/api/MetricsIT.java
+++ b/src/test/java/edu/harvard/iq/dataverse/api/MetricsIT.java
@@ -1,28 +1,29 @@
 package edu.harvard.iq.dataverse.api;
 
-import com.jayway.restassured.RestAssured;
-import com.jayway.restassured.response.Response;
+import io.restassured.RestAssured;
+import io.restassured.response.Response;
 import edu.harvard.iq.dataverse.metrics.MetricsUtil;
-import static javax.ws.rs.core.Response.Status.BAD_REQUEST;
-import static javax.ws.rs.core.Response.Status.OK;
-import org.junit.AfterClass;
-import static org.junit.Assert.assertEquals;
-import org.junit.BeforeClass;
-import org.junit.Test;
-import static org.junit.Assert.assertEquals;
+import static jakarta.ws.rs.core.Response.Status.BAD_REQUEST;
+import static jakarta.ws.rs.core.Response.Status.OK;
+import org.junit.jupiter.api.AfterAll;
+
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.Test;
+
+import static org.junit.jupiter.api.Assertions.assertEquals;
 
 //TODO: These tests are fairly flawed as they don't actually add data to compare on.
 //To improve these tests we should try adding data and see if the number DOESN'T
 //go up to show that the caching worked
 public class MetricsIT {
 
-    @BeforeClass
+    @BeforeAll
     public static void setUpClass() {
         RestAssured.baseURI = UtilIT.getRestAssuredBaseUri();
         UtilIT.clearMetricCache();
     }
 
-    @AfterClass
+    @AfterAll
     public static void cleanUpClass() {
         UtilIT.clearMetricCache();
     }
diff --git a/src/test/java/edu/harvard/iq/dataverse/api/MoveIT.java b/src/test/java/edu/harvard/iq/dataverse/api/MoveIT.java
index 1ba721fd410..f7135ce7f3b 100644
--- a/src/test/java/edu/harvard/iq/dataverse/api/MoveIT.java
+++ b/src/test/java/edu/harvard/iq/dataverse/api/MoveIT.java
@@ -1,32 +1,31 @@
 package edu.harvard.iq.dataverse.api;
 
-import com.jayway.restassured.RestAssured;
-import com.jayway.restassured.path.json.JsonPath;
-import static com.jayway.restassured.path.json.JsonPath.with;
-import com.jayway.restassured.response.Response;
+import io.restassured.RestAssured;
+import io.restassured.path.json.JsonPath;
+import static io.restassured.path.json.JsonPath.with;
+import io.restassured.response.Response;
 import edu.harvard.iq.dataverse.authorization.DataverseRole;
 import java.io.StringReader;
 import java.util.List;
 import java.util.logging.Logger;
-import javax.json.Json;
-import javax.json.JsonObject;
-import static javax.ws.rs.core.Response.Status.BAD_REQUEST;
-import static javax.ws.rs.core.Response.Status.CREATED;
-import static javax.ws.rs.core.Response.Status.FORBIDDEN;
-import static javax.ws.rs.core.Response.Status.OK;
-import static javax.ws.rs.core.Response.Status.UNAUTHORIZED;
+import jakarta.json.Json;
+import jakarta.json.JsonObject;
+import static jakarta.ws.rs.core.Response.Status.BAD_REQUEST;
+import static jakarta.ws.rs.core.Response.Status.CREATED;
+import static jakarta.ws.rs.core.Response.Status.FORBIDDEN;
+import static jakarta.ws.rs.core.Response.Status.OK;
+import static jakarta.ws.rs.core.Response.Status.UNAUTHORIZED;
 import org.hamcrest.CoreMatchers;
 import static org.hamcrest.CoreMatchers.equalTo;
-import org.junit.Assert;
-import static org.junit.Assert.assertEquals;
-import org.junit.BeforeClass;
-import org.junit.Test;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.Test;
 
 public class MoveIT {
 
     private static final Logger logger = Logger.getLogger(MoveIT.class.getCanonicalName());
 
-    @BeforeClass
+    @BeforeAll
     public static void setUpClass() {
         RestAssured.baseURI = UtilIT.getRestAssuredBaseUri();
     }
@@ -279,8 +278,8 @@ public void testMoveLinkedDataset() {
                 .body("message", equalTo("Use the query parameter forceMove=true to complete the move. This dataset is linked to the new host dataverse or one of its parents. This move would remove the link to this dataset. "));
 
         JsonObject linksBeforeData = Json.createReader(new StringReader(getLinksBefore.asString())).readObject();
-        Assert.assertEquals("OK", linksBeforeData.getString("status"));
-        Assert.assertEquals(dataverse2Alias + " (id " + dataverse2Id + ")", linksBeforeData.getJsonObject("data").getJsonArray("dataverses that link to dataset id " + datasetId).getString(0));
+        assertEquals("OK", linksBeforeData.getString("status"));
+        assertEquals(dataverse2Alias + " (id " + dataverse2Id + ")", linksBeforeData.getJsonObject("data").getJsonArray("dataverses that link to dataset id " + datasetId).getString(0));
 
         boolean forceMove = true;
         Response forceMoveLinkedDataset = UtilIT.moveDataset(datasetId.toString(), dataverse2Alias, forceMove, superuserApiToken);
@@ -301,15 +300,15 @@ public void testMoveLinkedDataset() {
                 .statusCode(OK.getStatusCode())
                 .body("feed.entry[0].id", CoreMatchers.endsWith(datasetPid));
 
-        UtilIT.sleepForReindex(datasetPid, superuserApiToken, 10);
+        UtilIT.sleepForReindex(datasetPid, superuserApiToken, 20);
         Response getLinksAfter = UtilIT.getDatasetLinks(datasetPid, superuserApiToken);
         getLinksAfter.prettyPrint();
         getLinksAfter.then().assertThat()
                 .statusCode(OK.getStatusCode());
 
         JsonObject linksAfterData = Json.createReader(new StringReader(getLinksAfter.asString())).readObject();
-        Assert.assertEquals("OK", linksAfterData.getString("status"));
-        Assert.assertEquals(0, linksAfterData.getJsonObject("data").getJsonArray("dataverses that link to dataset id " + datasetId).size());
+        assertEquals("OK", linksAfterData.getString("status"));
+        assertEquals(0, linksAfterData.getJsonObject("data").getJsonArray("dataverses that link to dataset id " + datasetId).size());
 
     }
     
diff --git a/src/test/java/edu/harvard/iq/dataverse/api/NetcdfIT.java b/src/test/java/edu/harvard/iq/dataverse/api/NetcdfIT.java
index 89ae1b9202e..d4dba236051 100644
--- a/src/test/java/edu/harvard/iq/dataverse/api/NetcdfIT.java
+++ b/src/test/java/edu/harvard/iq/dataverse/api/NetcdfIT.java
@@ -1,25 +1,25 @@
 package edu.harvard.iq.dataverse.api;
 
-import com.jayway.restassured.RestAssured;
-import com.jayway.restassured.path.json.JsonPath;
-import com.jayway.restassured.response.Response;
+import io.restassured.RestAssured;
+import io.restassured.path.json.JsonPath;
+import io.restassured.response.Response;
 import java.io.File;
 import java.io.IOException;
 import java.nio.file.Path;
 import java.nio.file.Paths;
-import javax.json.Json;
-import static javax.ws.rs.core.Response.Status.CREATED;
-import static javax.ws.rs.core.Response.Status.FORBIDDEN;
-import static javax.ws.rs.core.Response.Status.NOT_FOUND;
-import static javax.ws.rs.core.Response.Status.OK;
+import jakarta.json.Json;
+import static jakarta.ws.rs.core.Response.Status.CREATED;
+import static jakarta.ws.rs.core.Response.Status.FORBIDDEN;
+import static jakarta.ws.rs.core.Response.Status.NOT_FOUND;
+import static jakarta.ws.rs.core.Response.Status.OK;
 import org.hamcrest.CoreMatchers;
 import static org.hamcrest.CoreMatchers.equalTo;
-import org.junit.BeforeClass;
-import org.junit.Test;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.Test;
 
 public class NetcdfIT {
 
-    @BeforeClass
+    @BeforeAll
     public static void setUp() {
         RestAssured.baseURI = UtilIT.getRestAssuredBaseUri();
     }
diff --git a/src/test/java/edu/harvard/iq/dataverse/api/NotificationsIT.java b/src/test/java/edu/harvard/iq/dataverse/api/NotificationsIT.java
index 09a14e2d6ad..606bdc6ce5d 100644
--- a/src/test/java/edu/harvard/iq/dataverse/api/NotificationsIT.java
+++ b/src/test/java/edu/harvard/iq/dataverse/api/NotificationsIT.java
@@ -1,20 +1,20 @@
 package edu.harvard.iq.dataverse.api;
 
-import com.jayway.restassured.RestAssured;
-import com.jayway.restassured.path.json.JsonPath;
-import com.jayway.restassured.response.Response;
+import io.restassured.RestAssured;
+import io.restassured.path.json.JsonPath;
+import io.restassured.response.Response;
 import java.util.logging.Logger;
-import static javax.ws.rs.core.Response.Status.CREATED;
-import static javax.ws.rs.core.Response.Status.OK;
+import static jakarta.ws.rs.core.Response.Status.CREATED;
+import static jakarta.ws.rs.core.Response.Status.OK;
 import static org.hamcrest.CoreMatchers.equalTo;
-import org.junit.BeforeClass;
-import org.junit.Test;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.Test;
 
 public class NotificationsIT {
 
     private static final Logger logger = Logger.getLogger(NotificationsIT.class.getCanonicalName());
 
-    @BeforeClass
+    @BeforeAll
     public static void setUpClass() {
         RestAssured.baseURI = UtilIT.getRestAssuredBaseUri();
     }
diff --git a/src/test/java/edu/harvard/iq/dataverse/api/PidsIT.java b/src/test/java/edu/harvard/iq/dataverse/api/PidsIT.java
index c7ca70e0e1d..808346f021c 100644
--- a/src/test/java/edu/harvard/iq/dataverse/api/PidsIT.java
+++ b/src/test/java/edu/harvard/iq/dataverse/api/PidsIT.java
@@ -1,14 +1,14 @@
 package edu.harvard.iq.dataverse.api;
 
-import com.jayway.restassured.RestAssured;
-import com.jayway.restassured.path.json.JsonPath;
-import com.jayway.restassured.response.Response;
-import static javax.ws.rs.core.Response.Status.CREATED;
-import static javax.ws.rs.core.Response.Status.FORBIDDEN;
-import static javax.ws.rs.core.Response.Status.OK;
-import org.junit.BeforeClass;
-import org.junit.Ignore;
-import org.junit.Test;
+import io.restassured.RestAssured;
+import io.restassured.path.json.JsonPath;
+import io.restassured.response.Response;
+import static jakarta.ws.rs.core.Response.Status.CREATED;
+import static jakarta.ws.rs.core.Response.Status.FORBIDDEN;
+import static jakarta.ws.rs.core.Response.Status.OK;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.Disabled;
+import org.junit.jupiter.api.Test;
 
 /**
  * In order to execute this test code you must be configured with DataCite
@@ -16,12 +16,12 @@
  */
 public class PidsIT {
 
-    @BeforeClass
+    @BeforeAll
     public static void setUpClass() {
         RestAssured.baseURI = UtilIT.getRestAssuredBaseUri();
     }
 
-    @Ignore
+    @Disabled
     @Test
     public void testGetPid() {
         String pid = "";
@@ -94,7 +94,7 @@ public void testReservePid() {
          */
     }
 
-    @Ignore
+    @Disabled
     @Test
     public void testDeletePid() {
         String pid = "";
@@ -112,7 +112,7 @@ public void testDeletePid() {
         deletePid.prettyPrint();
     }
 
-    @Ignore
+    @Disabled
     @Test
     public void testCannotPublishUntilReserved() {
         Response createUser = UtilIT.createRandomUser();
@@ -154,7 +154,7 @@ public void testCannotPublishUntilReserved() {
                 .statusCode(FORBIDDEN.getStatusCode());
     }
 
-    @Ignore
+    @Disabled
     @Test
     public void testDeleteDraftPidOnDelete() {
         Response createUser = UtilIT.createRandomUser();
diff --git a/src/test/java/edu/harvard/iq/dataverse/api/ProvIT.java b/src/test/java/edu/harvard/iq/dataverse/api/ProvIT.java
index 52143eb9981..9b3b66538d7 100644
--- a/src/test/java/edu/harvard/iq/dataverse/api/ProvIT.java
+++ b/src/test/java/edu/harvard/iq/dataverse/api/ProvIT.java
@@ -1,32 +1,31 @@
 package edu.harvard.iq.dataverse.api;
 
-import com.jayway.restassured.RestAssured;
-import com.jayway.restassured.path.json.JsonPath;
-import com.jayway.restassured.response.Response;
-import java.util.logging.Logger;
-import javax.json.Json;
-import javax.json.JsonArray;
-import javax.json.JsonObject;
-import static javax.ws.rs.core.Response.Status.CREATED;
-import static javax.ws.rs.core.Response.Status.OK;
-import static javax.ws.rs.core.Response.Status.BAD_REQUEST;
-import static javax.ws.rs.core.Response.Status.FORBIDDEN;
-import static junit.framework.Assert.assertEquals;
+import io.restassured.RestAssured;
+import io.restassured.path.json.JsonPath;
+import io.restassured.response.Response;
+import jakarta.json.Json;
+import jakarta.json.JsonArray;
+import jakarta.json.JsonObject;
+import static jakarta.ws.rs.core.Response.Status.CREATED;
+import static jakarta.ws.rs.core.Response.Status.OK;
+import static jakarta.ws.rs.core.Response.Status.BAD_REQUEST;
+import static jakarta.ws.rs.core.Response.Status.FORBIDDEN;
+import static org.junit.jupiter.api.Assertions.assertEquals;
 import static org.hamcrest.CoreMatchers.equalTo;
 import static org.hamcrest.CoreMatchers.notNullValue;
-import org.junit.Before;
-import org.junit.BeforeClass;
-import org.junit.Test;
+
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.Test;
 
 public class ProvIT {
     
-    @BeforeClass
+    @BeforeAll
     public static void setUpClass() {
         RestAssured.baseURI = UtilIT.getRestAssuredBaseUri();
     }
 
     
-    @Test 
+    @Test
     public void testFreeformDraftActions() {
         Response createDepositor = UtilIT.createRandomUser();
         createDepositor.prettyPrint();
diff --git a/src/test/java/edu/harvard/iq/dataverse/api/RemoteStoreIT.java b/src/test/java/edu/harvard/iq/dataverse/api/RemoteStoreIT.java
index ae5bc8b7316..f653b358b33 100644
--- a/src/test/java/edu/harvard/iq/dataverse/api/RemoteStoreIT.java
+++ b/src/test/java/edu/harvard/iq/dataverse/api/RemoteStoreIT.java
@@ -1,17 +1,17 @@
 package edu.harvard.iq.dataverse.api;
 
-import com.jayway.restassured.RestAssured;
-import com.jayway.restassured.response.Response;
-import javax.json.Json;
-import javax.json.JsonObjectBuilder;
-import static javax.ws.rs.core.Response.Status.CREATED;
-import static javax.ws.rs.core.Response.Status.OK;
-import org.junit.BeforeClass;
-import org.junit.Test;
+import io.restassured.RestAssured;
+import io.restassured.response.Response;
+import jakarta.json.Json;
+import jakarta.json.JsonObjectBuilder;
+import static jakarta.ws.rs.core.Response.Status.CREATED;
+import static jakarta.ws.rs.core.Response.Status.OK;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.Test;
 
 public class RemoteStoreIT {
 
-    @BeforeClass
+    @BeforeAll
     public static void setUp() {
         RestAssured.baseURI = UtilIT.getRestAssuredBaseUri();
     }
diff --git a/src/test/java/edu/harvard/iq/dataverse/api/RolesIT.java b/src/test/java/edu/harvard/iq/dataverse/api/RolesIT.java
index d1e2ffb2426..8b5ac917dea 100644
--- a/src/test/java/edu/harvard/iq/dataverse/api/RolesIT.java
+++ b/src/test/java/edu/harvard/iq/dataverse/api/RolesIT.java
@@ -1,14 +1,16 @@
 
 package edu.harvard.iq.dataverse.api;
 
-import com.jayway.restassured.RestAssured;
-import com.jayway.restassured.path.json.JsonPath;
-import com.jayway.restassured.response.Response;
+import io.restassured.RestAssured;
+import io.restassured.path.json.JsonPath;
+import io.restassured.response.Response;
 import java.util.logging.Logger;
-import static junit.framework.Assert.assertEquals;
 import static org.hamcrest.CoreMatchers.equalTo;
-import org.junit.BeforeClass;
-import org.junit.Test;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+
+import org.junit.jupiter.api.Assertions;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.Test;
 
 /**
  *
@@ -18,7 +20,7 @@ public class RolesIT {
     
     private static final Logger logger = Logger.getLogger(AdminIT.class.getCanonicalName());
 
-    @BeforeClass
+    @BeforeAll
     public static void setUp() {
         RestAssured.baseURI = UtilIT.getRestAssuredBaseUri();
     }
diff --git a/src/test/java/edu/harvard/iq/dataverse/api/S3AccessIT.java b/src/test/java/edu/harvard/iq/dataverse/api/S3AccessIT.java
index 29cb6895bba..8b1e96f3622 100644
--- a/src/test/java/edu/harvard/iq/dataverse/api/S3AccessIT.java
+++ b/src/test/java/edu/harvard/iq/dataverse/api/S3AccessIT.java
@@ -1,19 +1,14 @@
 package edu.harvard.iq.dataverse.api;
 
-import com.jayway.restassured.RestAssured;
-import static com.jayway.restassured.RestAssured.given;
-import com.jayway.restassured.path.json.JsonPath;
-import com.jayway.restassured.response.Response;
+import io.restassured.RestAssured;
+import io.restassured.path.json.JsonPath;
+import io.restassured.response.Response;
 import java.util.logging.Logger;
-import static javax.ws.rs.core.Response.Status.OK;
-import static junit.framework.Assert.assertEquals;
-import static org.hamcrest.CoreMatchers.equalTo;
-import org.junit.BeforeClass;
-import org.junit.Test;
-import static org.hamcrest.CoreMatchers.nullValue;
+
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.Test;
+
 import static org.hamcrest.Matchers.startsWith;
-import org.junit.After;
-import org.junit.Assert;
 
 /**
  *  NOTE: This test WILL NOT pass if your installation is not configured for Amazon S3 storage.
@@ -25,7 +20,7 @@ public class S3AccessIT {
     
     private static final Logger logger = Logger.getLogger(S3AccessIT.class.getCanonicalName());
 
-    @BeforeClass
+    @BeforeAll
     public static void setUp() {
         RestAssured.baseURI = UtilIT.getRestAssuredBaseUri();
         
diff --git a/src/test/java/edu/harvard/iq/dataverse/api/SearchIT.java b/src/test/java/edu/harvard/iq/dataverse/api/SearchIT.java
index f3def90a005..125753296a2 100644
--- a/src/test/java/edu/harvard/iq/dataverse/api/SearchIT.java
+++ b/src/test/java/edu/harvard/iq/dataverse/api/SearchIT.java
@@ -1,49 +1,49 @@
 package edu.harvard.iq.dataverse.api;
 
-import com.jayway.restassured.RestAssured;
-import com.jayway.restassured.path.json.JsonPath;
-import com.jayway.restassured.response.Response;
+import io.restassured.RestAssured;
+import io.restassured.path.json.JsonPath;
+import io.restassured.response.Response;
 import edu.harvard.iq.dataverse.settings.SettingsServiceBean;
 import java.util.logging.Level;
 import java.util.logging.Logger;
-import javax.json.Json;
-import javax.json.JsonObject;
-import org.junit.AfterClass;
-import org.junit.BeforeClass;
-import org.junit.Test;
+import jakarta.json.Json;
+import jakarta.json.JsonObject;
+import org.junit.jupiter.api.AfterAll;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.AfterEach;
 import edu.harvard.iq.dataverse.util.FileUtil;
 import java.io.File;
 import java.io.InputStream;
 import java.io.UnsupportedEncodingException;
 import java.util.Base64;
-import javax.json.JsonArray;
-import static javax.ws.rs.core.Response.Status.BAD_REQUEST;
-import static javax.ws.rs.core.Response.Status.OK;
-import static javax.ws.rs.core.Response.Status.FORBIDDEN;
+import jakarta.json.JsonArray;
+import static jakarta.ws.rs.core.Response.Status.BAD_REQUEST;
+import static jakarta.ws.rs.core.Response.Status.OK;
+import static jakarta.ws.rs.core.Response.Status.FORBIDDEN;
 import org.hamcrest.CoreMatchers;
-import static org.junit.Assert.assertFalse;
-import static org.junit.Assert.assertTrue;
 import edu.harvard.iq.dataverse.dataaccess.ImageThumbConverter;
 import java.awt.image.BufferedImage;
 import java.io.IOException;
-import static junit.framework.Assert.assertEquals;
 import static java.lang.Thread.sleep;
 import javax.imageio.ImageIO;
-import static javax.ws.rs.core.Response.Status.CREATED;
-import static javax.ws.rs.core.Response.Status.NOT_FOUND;
-import static javax.ws.rs.core.Response.Status.OK;
-import static javax.ws.rs.core.Response.Status.UNAUTHORIZED;
+import static jakarta.ws.rs.core.Response.Status.CREATED;
+import static jakarta.ws.rs.core.Response.Status.NOT_FOUND;
+import static jakarta.ws.rs.core.Response.Status.UNAUTHORIZED;
 import org.hamcrest.Matchers;
-import org.junit.After;
-import static org.junit.Assert.assertNotEquals;
-import static java.lang.Thread.sleep;
-import javax.json.JsonObjectBuilder;
+
+import jakarta.json.JsonObjectBuilder;
+
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertFalse;
+import static org.junit.jupiter.api.Assertions.assertNotEquals;
+import static org.junit.jupiter.api.Assertions.assertTrue;
 
 public class SearchIT {
 
     private static final Logger logger = Logger.getLogger(SearchIT.class.getCanonicalName());
 
-    @BeforeClass
+    @BeforeAll
     public static void setUpClass() {
 
         RestAssured.baseURI = UtilIT.getRestAssuredBaseUri();
@@ -105,7 +105,7 @@ public void testSearchPermisions() throws InterruptedException {
         assertEquals(200, grantUser2AccessOnDataset.getStatusCode());
 
         String searchPart = "id:dataset_" + datasetId1 + "_draft";        
-        assertTrue("Failed test if search exceeds max duration " + searchPart , UtilIT.sleepForSearch(searchPart, apiToken2, "", UtilIT.MAXIMUM_INGEST_LOCK_DURATION)); 
+        assertTrue(UtilIT.sleepForSearch(searchPart, apiToken2, "", UtilIT.MAXIMUM_INGEST_LOCK_DURATION), "Failed test if search exceeds max duration " + searchPart);
         
         Response shouldBeVisibleToUser2 = UtilIT.search("id:dataset_" + datasetId1 + "_draft", apiToken2);
         shouldBeVisibleToUser2.prettyPrint();
@@ -311,7 +311,7 @@ public void testSearchDynamicMetadataFields() {
         allFieldsFromCitation.then().assertThat()
                 .body("data.items[0].metadataBlocks.citation.displayName", CoreMatchers.equalTo("Citation Metadata"))
                 // Many fields returned, all of the citation block that has been filled in.
-                .body("data.items[0].metadataBlocks.citation.fields.typeName.size", Matchers.equalTo(5))
+                .body("data.items[0].metadataBlocks.citation.fields", Matchers.hasSize(5))
                 .statusCode(OK.getStatusCode());
 
     }
@@ -814,7 +814,7 @@ public void testNestedSubtree() {
                 .statusCode(OK.getStatusCode());
 
         try {
-            Thread.sleep(2000);
+            Thread.sleep(4000);
         } catch (InterruptedException ex) {
             /**
              * This sleep is here because dataverseAlias2 is showing with
@@ -915,7 +915,7 @@ public void testCuratorCardDataversePopulation() throws InterruptedException {
         String searchPart = "*"; 
         
         Response searchPublishedSubtreeSuper = UtilIT.search(searchPart, apiTokenSuper, "&subtree="+parentDataverseAlias);
-        assertTrue("Failed test if search exceeds max duration " + searchPart , UtilIT.sleepForSearch(searchPart, apiToken, "&subtree="+parentDataverseAlias, UtilIT.MAXIMUM_INGEST_LOCK_DURATION)); 
+        assertTrue(UtilIT.sleepForSearch(searchPart, apiToken, "&subtree="+parentDataverseAlias, UtilIT.MAXIMUM_INGEST_LOCK_DURATION), "Failed test if search exceeds max duration " + searchPart);
         searchPublishedSubtreeSuper.prettyPrint();
         searchPublishedSubtreeSuper.then().assertThat()
                 .statusCode(OK.getStatusCode())
@@ -966,6 +966,9 @@ public void testSubtreePermissions() {
         Response datasetAsJson2 = UtilIT.nativeGet(datasetId2, apiToken);
         datasetAsJson2.then().assertThat()
                 .statusCode(OK.getStatusCode());
+        
+        // Wait a little while for the index to pick up the datasets; otherwise there is a timing issue when searching for them.
+        UtilIT.sleepForReindex(datasetId2.toString(), apiToken, 2);
 
         String identifier = JsonPath.from(datasetAsJson.getBody().asString()).getString("data.identifier");
         String identifier2 = JsonPath.from(datasetAsJson2.getBody().asString()).getString("data.identifier"); 
@@ -1282,7 +1285,7 @@ public void testGeospatialSearchInvalid() {
 
     }
 
-    @After
+    @AfterEach
     public void tearDownDataverse() {
         File treesThumb = new File("scripts/search/data/binary/trees.png.thumb48");
         treesThumb.delete();
@@ -1292,7 +1295,7 @@ public void tearDownDataverse() {
         dataverseprojectThumb.delete();
     }
 
-    @AfterClass
+    @AfterAll
     public static void cleanup() {
     }
 
diff --git a/src/test/java/edu/harvard/iq/dataverse/api/SignedUrlAuthMechanismIT.java b/src/test/java/edu/harvard/iq/dataverse/api/SignedUrlAuthMechanismIT.java
index e30bf40082b..dd5ddbfa7d8 100644
--- a/src/test/java/edu/harvard/iq/dataverse/api/SignedUrlAuthMechanismIT.java
+++ b/src/test/java/edu/harvard/iq/dataverse/api/SignedUrlAuthMechanismIT.java
@@ -1,18 +1,18 @@
 package edu.harvard.iq.dataverse.api;
 
-import com.jayway.restassured.RestAssured;
-import com.jayway.restassured.response.Response;
-import org.junit.BeforeClass;
+import io.restassured.RestAssured;
+import io.restassured.response.Response;
+import org.junit.jupiter.api.BeforeAll;
 import org.junit.jupiter.api.Test;
 
-import static com.jayway.restassured.RestAssured.get;
-import static javax.ws.rs.core.Response.Status.OK;
-import static javax.ws.rs.core.Response.Status.UNAUTHORIZED;
+import static io.restassured.RestAssured.get;
+import static jakarta.ws.rs.core.Response.Status.OK;
+import static jakarta.ws.rs.core.Response.Status.UNAUTHORIZED;
 import static org.junit.jupiter.api.Assertions.assertEquals;
 
 public class SignedUrlAuthMechanismIT {
 
-    @BeforeClass
+    @BeforeAll
     public static void setUp() {
         RestAssured.baseURI = UtilIT.getRestAssuredBaseUri();
     }
diff --git a/src/test/java/edu/harvard/iq/dataverse/api/SignpostingIT.java b/src/test/java/edu/harvard/iq/dataverse/api/SignpostingIT.java
index e22d0740c48..17eba4770f1 100644
--- a/src/test/java/edu/harvard/iq/dataverse/api/SignpostingIT.java
+++ b/src/test/java/edu/harvard/iq/dataverse/api/SignpostingIT.java
@@ -1,28 +1,28 @@
 package edu.harvard.iq.dataverse.api;
 
-import com.jayway.restassured.RestAssured;
-import com.jayway.restassured.http.ContentType;
+import io.restassured.RestAssured;
+import io.restassured.http.ContentType;
 
-import static com.jayway.restassured.RestAssured.given;
-import com.jayway.restassured.response.Response;
+import static io.restassured.RestAssured.given;
+import io.restassured.response.Response;
 
 import edu.harvard.iq.dataverse.util.json.JsonUtil;
 
-import static javax.ws.rs.core.Response.Status.CREATED;
-import static javax.ws.rs.core.Response.Status.OK;
-import static org.junit.Assert.assertTrue;
+import static jakarta.ws.rs.core.Response.Status.CREATED;
+import static jakarta.ws.rs.core.Response.Status.OK;
+import static org.junit.jupiter.api.Assertions.assertTrue;
 
 import java.util.regex.Matcher;
 import java.util.regex.Pattern;
 
-import javax.json.JsonObject;
+import jakarta.json.JsonObject;
 
-import org.junit.BeforeClass;
-import org.junit.Test;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.Test;
 
 public class SignpostingIT {
 
-    @BeforeClass
+    @BeforeAll
     public static void setUpClass() {
         RestAssured.baseURI = UtilIT.getRestAssuredBaseUri();
     }
diff --git a/src/test/java/edu/harvard/iq/dataverse/api/SiteMapIT.java b/src/test/java/edu/harvard/iq/dataverse/api/SiteMapIT.java
index 723f05d3802..1b9025cab82 100644
--- a/src/test/java/edu/harvard/iq/dataverse/api/SiteMapIT.java
+++ b/src/test/java/edu/harvard/iq/dataverse/api/SiteMapIT.java
@@ -1,13 +1,13 @@
 package edu.harvard.iq.dataverse.api;
 
-import com.jayway.restassured.RestAssured;
-import org.junit.BeforeClass;
-import org.junit.Test;
-import com.jayway.restassured.response.Response;
+import io.restassured.RestAssured;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.Test;
+import io.restassured.response.Response;
 
 public class SiteMapIT {
 
-    @BeforeClass
+    @BeforeAll
     public static void setUpClass() {
         RestAssured.baseURI = UtilIT.getRestAssuredBaseUri();
     }
diff --git a/src/test/java/edu/harvard/iq/dataverse/api/StorageSitesIT.java b/src/test/java/edu/harvard/iq/dataverse/api/StorageSitesIT.java
index a33d7d60263..89208997ee3 100644
--- a/src/test/java/edu/harvard/iq/dataverse/api/StorageSitesIT.java
+++ b/src/test/java/edu/harvard/iq/dataverse/api/StorageSitesIT.java
@@ -1,17 +1,17 @@
 package edu.harvard.iq.dataverse.api;
 
-import com.jayway.restassured.RestAssured;
-import com.jayway.restassured.response.Response;
+import io.restassured.RestAssured;
+import io.restassured.response.Response;
 import edu.harvard.iq.dataverse.locality.StorageSite;
 import edu.harvard.iq.dataverse.util.SystemConfig;
-import javax.json.Json;
-import javax.json.JsonObjectBuilder;
-import org.junit.BeforeClass;
-import org.junit.Test;
+import jakarta.json.Json;
+import jakarta.json.JsonObjectBuilder;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.Test;
 
 public class StorageSitesIT {
 
-    @BeforeClass
+    @BeforeAll
     public static void setUpClass() {
         RestAssured.baseURI = UtilIT.getRestAssuredBaseUri();
     }
diff --git a/src/test/java/edu/harvard/iq/dataverse/api/SwordIT.java b/src/test/java/edu/harvard/iq/dataverse/api/SwordIT.java
index 3e3161cb0bc..39156f1c59b 100644
--- a/src/test/java/edu/harvard/iq/dataverse/api/SwordIT.java
+++ b/src/test/java/edu/harvard/iq/dataverse/api/SwordIT.java
@@ -1,9 +1,8 @@
 package edu.harvard.iq.dataverse.api;
 
-import com.jayway.restassured.RestAssured;
-import com.jayway.restassured.path.json.JsonPath;
-import com.jayway.restassured.response.Response;
-import edu.harvard.iq.dataverse.GlobalId;
+import io.restassured.RestAssured;
+import io.restassured.path.json.JsonPath;
+import io.restassured.response.Response;
 import edu.harvard.iq.dataverse.api.datadeposit.SwordConfigurationImpl;
 import edu.harvard.iq.dataverse.settings.SettingsServiceBean;
 import java.io.File;
@@ -13,27 +12,28 @@
 import java.util.List;
 import java.util.Map;
 import java.util.logging.Logger;
-import static javax.ws.rs.core.Response.Status.BAD_REQUEST;
-import static javax.ws.rs.core.Response.Status.CREATED;
-import static javax.ws.rs.core.Response.Status.FORBIDDEN;
-import static javax.ws.rs.core.Response.Status.UNAUTHORIZED;
-import static javax.ws.rs.core.Response.Status.METHOD_NOT_ALLOWED;
-import static javax.ws.rs.core.Response.Status.NOT_FOUND;
-import static javax.ws.rs.core.Response.Status.NO_CONTENT;
-import static javax.ws.rs.core.Response.Status.OK;
+import static jakarta.ws.rs.core.Response.Status.BAD_REQUEST;
+import static jakarta.ws.rs.core.Response.Status.CREATED;
+import static jakarta.ws.rs.core.Response.Status.FORBIDDEN;
+import static jakarta.ws.rs.core.Response.Status.UNAUTHORIZED;
+import static jakarta.ws.rs.core.Response.Status.METHOD_NOT_ALLOWED;
+import static jakarta.ws.rs.core.Response.Status.NOT_FOUND;
+import static jakarta.ws.rs.core.Response.Status.NO_CONTENT;
+import static jakarta.ws.rs.core.Response.Status.OK;
 import static org.hamcrest.CoreMatchers.equalTo;
 import static org.hamcrest.CoreMatchers.nullValue;
 import static org.hamcrest.Matchers.endsWith;
 import static org.hamcrest.Matchers.startsWith;
-import org.junit.AfterClass;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertFalse;
-import static org.junit.Assert.assertNotNull;
-import static org.junit.Assert.assertNull;
-import static org.junit.Assert.assertTrue;
-import static org.junit.Assume.assumeTrue;
-import org.junit.BeforeClass;
-import org.junit.Test;
+import org.junit.jupiter.api.AfterAll;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertFalse;
+import static org.junit.jupiter.api.Assertions.assertNotNull;
+import static org.junit.jupiter.api.Assertions.assertNull;
+import static org.junit.jupiter.api.Assertions.assertTrue;
+import static org.junit.jupiter.api.Assumptions.assumeTrue;
+
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.Test;
 
 /**
  * In all these tests you should never see something like "[long string exposing
@@ -55,7 +55,7 @@ public class SwordIT {
     private static final String rootDvNotPublished = "Many of these SWORD tests require that the root dataverse collection has been published. Publish the root dataverse and then re-run these tests.";
     private static final String rootDvLackPermissions = "Many of these SWORD tests require you set permissions for the root dataverse collection: \"Anyone with a Dataverse account can add sub dataverses and datasets\" + curator role for new datasets. Please set and re-run these tests.";
 
-    @BeforeClass
+    @BeforeAll
     public static void setUpClass() {
         RestAssured.baseURI = UtilIT.getRestAssuredBaseUri();
         boolean testAgainstDev1 = false;
@@ -73,7 +73,7 @@ public static void setUpClass() {
         Response checkRootDataverse = UtilIT.listDatasetsViaSword(rootDataverseAlias, apitoken);
         //checkRootDataverse.prettyPrint();
         checkRootDataverse.then().assertThat().statusCode(OK.getStatusCode());
-        assumeTrue(rootDvNotPublished,  checkRootDataverse.getBody().xmlPath().getBoolean("feed.dataverseHasBeenReleased"));
+        assumeTrue(checkRootDataverse.getBody().xmlPath().getBoolean("feed.dataverseHasBeenReleased"), rootDvNotPublished);
         
         // check that root dataverse has permissions for any user set to dataverse + dataset creator (not admin, not curator!)
         checkRootDataverse = UtilIT.getRoleAssignmentsOnDataverse(rootDataverseAlias, apiTokenSuperuser);
@@ -88,7 +88,7 @@ public static void setUpClass() {
                 break;
             }
         }
-        assumeTrue(rootDvLackPermissions, properPermissionsSet);
+        assumeTrue(properPermissionsSet, rootDvLackPermissions);
 
     }
 
@@ -968,7 +968,7 @@ public void testDeleteFiles() {
 
     }
 
-    @AfterClass
+    @AfterAll
     public static void tearDownClass() {
         // cleanup, allow custom terms again (delete because it defaults to true)
         UtilIT.deleteSetting(SettingsServiceBean.Key.AllowCustomTermsOfUse);
diff --git a/src/test/java/edu/harvard/iq/dataverse/api/TabularIT.java b/src/test/java/edu/harvard/iq/dataverse/api/TabularIT.java
index 512080ae569..25eec16e17b 100644
--- a/src/test/java/edu/harvard/iq/dataverse/api/TabularIT.java
+++ b/src/test/java/edu/harvard/iq/dataverse/api/TabularIT.java
@@ -1,33 +1,31 @@
 package edu.harvard.iq.dataverse.api;
 
-import com.jayway.restassured.RestAssured;
-import com.jayway.restassured.path.json.JsonPath;
-import com.jayway.restassured.response.Response;
-import static edu.harvard.iq.dataverse.api.AccessIT.apiToken;
-import static edu.harvard.iq.dataverse.api.AccessIT.datasetId;
-import static edu.harvard.iq.dataverse.api.AccessIT.tabFile3NameRestricted;
+import io.restassured.RestAssured;
+import io.restassured.path.json.JsonPath;
+import io.restassured.response.Response;
+
 import java.io.File;
 import java.util.Arrays;
 import java.util.logging.Logger;
-import static javax.ws.rs.core.Response.Status.CREATED;
-import static javax.ws.rs.core.Response.Status.OK;
+import static jakarta.ws.rs.core.Response.Status.CREATED;
+import static jakarta.ws.rs.core.Response.Status.OK;
 import static org.hamcrest.CoreMatchers.equalTo;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertTrue;
-import org.junit.BeforeClass;
-import org.junit.Ignore;
-import org.junit.Test;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertTrue;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.Disabled;
+import org.junit.jupiter.api.Test;
 
 public class TabularIT {
 
     private static final Logger logger = Logger.getLogger(TabularIT.class.getCanonicalName());
 
-    @BeforeClass
+    @BeforeAll
     public static void setUpClass() {
         RestAssured.baseURI = UtilIT.getRestAssuredBaseUri();
     }
 
-    @Ignore
+    @Disabled
     @Test
     public void testTabularFile() throws InterruptedException {
         Response createUser = UtilIT.createRandomUser();
@@ -63,7 +61,7 @@ public void testTabularFile() throws InterruptedException {
 
         // Give file time to ingest
         
-        assertTrue("Failed test if Ingest Lock exceeds max duration " + pathToFileThatGoesThroughIngest , UtilIT.sleepForLock(datasetId.longValue(), "Ingest", apiToken, UtilIT.MAXIMUM_INGEST_LOCK_DURATION));
+        assertTrue(UtilIT.sleepForLock(datasetId.longValue(), "Ingest", apiToken, UtilIT.MAXIMUM_INGEST_LOCK_DURATION), "Failed test if Ingest Lock exceeds max duration " + pathToFileThatGoesThroughIngest);
       //  Thread.sleep(10000);
 
         Response fileMetadataNoFormat = UtilIT.getFileMetadata(fileIdAsString, null, apiToken);
@@ -102,7 +100,7 @@ public void testTabularFile() throws InterruptedException {
 
     }
 
-    @Ignore
+    @Disabled
     @Test
     public void test50by1000() {
         // cp scripts/search/data/tabular/50by1000.dta /tmp
@@ -113,7 +111,7 @@ public void test50by1000() {
         assertEquals("NVARS: 50", response.body().asString().split("\n")[0]);
     }
 
-    @Ignore
+    @Disabled
     @Test
     public void testStata13TinyFile() {
         // cp scripts/search/data/tabular/120745.dta /tmp
@@ -124,7 +122,7 @@ public void testStata13TinyFile() {
         assertEquals("NVARS: 1", response.body().asString().split("\n")[0]);
     }
 
-    @Ignore
+    @Disabled
     @Test
     public void testStata13Auto() {
         // curl https://www.stata-press.com/data/r13/auto.dta > /tmp/stata13-auto.dta
@@ -135,7 +133,7 @@ public void testStata13Auto() {
         assertEquals("NVARS: 12", response.body().asString().split("\n")[0]);
     }
 
-    @Ignore
+    @Disabled
     @Test
     public void testStata14OpenSourceAtHarvard() {
         // https://dataverse.harvard.edu/file.xhtml?fileId=3040230 converted to Stata 14: 2017-07-31.tab
@@ -148,7 +146,7 @@ public void testStata14OpenSourceAtHarvard() {
         assertEquals("NVARS: 10", response.body().asString().split("\n")[0]);
     }
 
-    @Ignore
+    @Disabled
     @Test
     public void testStata14Aggregated() {
         // https://dataverse.harvard.edu/file.xhtml?fileId=3140457 Stata 14: 2018_04_06_Aggregated_dataset_v2.dta
@@ -160,7 +158,7 @@ public void testStata14Aggregated() {
         assertEquals("NVARS: 227", response.body().asString().split("\n")[0]);
     }
 
-    @Ignore
+    @Disabled
     @Test
     public void testStata14MmPublic() {
         // TODO: This file was downloaded at random. We could keep trying to get it to ingest.
@@ -175,7 +173,7 @@ public void testStata14MmPublic() {
         assertEquals("NVARS: 12", response.body().asString().split("\n")[0]);
     }
 
-    @Ignore
+    @Disabled
     @Test
     public void testStata15() {
         // for i in `echo {0..33000}`; do echo -n "var$i,"; done > 33k.csv
@@ -187,7 +185,7 @@ public void testStata15() {
         assertEquals("NVARS: 33001", response.body().asString().split("\n")[0]);
     }
 
-    @Ignore
+    @Disabled
     @Test
     public void testStata13Multiple() {
         String fileType = "application/x-stata-13";
@@ -207,7 +205,7 @@ public void testStata13Multiple() {
         }
     }
     
-    @Ignore
+    @Disabled
     @Test
     public void testStata14Multiple() {
         String fileType = "application/x-stata-14";
diff --git a/src/test/java/edu/harvard/iq/dataverse/api/ThumbnailsIT.java b/src/test/java/edu/harvard/iq/dataverse/api/ThumbnailsIT.java
index ffa432de63b..8d5b6d86cd9 100644
--- a/src/test/java/edu/harvard/iq/dataverse/api/ThumbnailsIT.java
+++ b/src/test/java/edu/harvard/iq/dataverse/api/ThumbnailsIT.java
@@ -1,8 +1,8 @@
 package edu.harvard.iq.dataverse.api;
 
-import com.jayway.restassured.response.Response;
+import io.restassured.response.Response;
 import org.hamcrest.CoreMatchers;
-import org.junit.Test;
+import org.junit.jupiter.api.Test;
 
 public class ThumbnailsIT {
 
diff --git a/src/test/java/edu/harvard/iq/dataverse/api/UsersIT.java b/src/test/java/edu/harvard/iq/dataverse/api/UsersIT.java
index 07e8ef41d92..5880b08e5c2 100644
--- a/src/test/java/edu/harvard/iq/dataverse/api/UsersIT.java
+++ b/src/test/java/edu/harvard/iq/dataverse/api/UsersIT.java
@@ -1,33 +1,33 @@
 package edu.harvard.iq.dataverse.api;
 
-import com.jayway.restassured.RestAssured;
-import static com.jayway.restassured.RestAssured.given;
-import com.jayway.restassured.http.ContentType;
-import com.jayway.restassured.path.json.JsonPath;
-import com.jayway.restassured.response.Response;
+import io.restassured.RestAssured;
+import static io.restassured.RestAssured.given;
+import io.restassured.http.ContentType;
+import io.restassured.path.json.JsonPath;
+import io.restassured.response.Response;
 import edu.harvard.iq.dataverse.authorization.DataverseRole;
 import edu.harvard.iq.dataverse.settings.SettingsServiceBean;
 import java.util.ArrayList;
 import java.util.List;
 import java.util.UUID;
-import javax.json.Json;
-import javax.json.JsonObjectBuilder;
-import static javax.ws.rs.core.Response.Status.BAD_REQUEST;
-import static javax.ws.rs.core.Response.Status.CREATED;
-import static javax.ws.rs.core.Response.Status.NOT_FOUND;
-import static javax.ws.rs.core.Response.Status.OK;
-import static javax.ws.rs.core.Response.Status.UNAUTHORIZED;
-import static junit.framework.Assert.assertEquals;
+import jakarta.json.Json;
+import jakarta.json.JsonObjectBuilder;
+import static jakarta.ws.rs.core.Response.Status.BAD_REQUEST;
+import static jakarta.ws.rs.core.Response.Status.CREATED;
+import static jakarta.ws.rs.core.Response.Status.NOT_FOUND;
+import static jakarta.ws.rs.core.Response.Status.OK;
+import static jakarta.ws.rs.core.Response.Status.UNAUTHORIZED;
+import static org.junit.jupiter.api.Assertions.assertEquals;
 import static org.hamcrest.CoreMatchers.containsString;
 import static org.hamcrest.CoreMatchers.equalTo;
 import static org.hamcrest.Matchers.contains;
-import static org.junit.Assert.assertTrue;
-import org.junit.BeforeClass;
-import org.junit.Test;
+import static org.junit.jupiter.api.Assertions.assertTrue;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.Test;
 
 public class UsersIT {
 
-    @BeforeClass
+    @BeforeAll
     public static void setUp() {
         RestAssured.baseURI = UtilIT.getRestAssuredBaseUri();
        /* 
@@ -162,7 +162,7 @@ public void testMergeAccounts(){
         Integer tabFile3IdRestrictedNew = JsonPath.from(tab3AddResponse.body().asString()).getInt("data.files[0].dataFile.id");
         
         //Sleep while dataset locked for ingest
-        assertTrue("Failed test if Ingest Lock exceeds max duration " + tabFile3NameRestrictedNew , UtilIT.sleepForLock(datasetIdNew.longValue(), "Ingest", superuserApiToken, UtilIT.MAXIMUM_INGEST_LOCK_DURATION));
+        assertTrue(UtilIT.sleepForLock(datasetIdNew.longValue(), "Ingest", superuserApiToken, UtilIT.MAXIMUM_INGEST_LOCK_DURATION), "Failed test if Ingest Lock exceeds max duration " + tabFile3NameRestrictedNew);
 
         Response restrictResponse = UtilIT.restrictFile(tabFile3IdRestrictedNew.toString(), true, superuserApiToken);
         restrictResponse.prettyPrint();
diff --git a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java
index 35809d5e490..e47971f9b92 100644
--- a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java
+++ b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java
@@ -1,17 +1,15 @@
 package edu.harvard.iq.dataverse.api;
 
-import com.jayway.restassured.RestAssured;
-import com.jayway.restassured.http.ContentType;
-import com.jayway.restassured.path.json.JsonPath;
-import com.jayway.restassured.response.Response;
+import io.restassured.http.ContentType;
+import io.restassured.path.json.JsonPath;
+import io.restassured.response.Response;
 import java.util.UUID;
 import java.util.logging.Logger;
-import javax.json.Json;
-import javax.json.JsonObjectBuilder;
-import javax.json.JsonArrayBuilder;
-import javax.json.JsonObject;
-import javax.ws.rs.client.Client;
-import javax.ws.rs.client.ClientBuilder;
+import jakarta.json.Json;
+import jakarta.json.JsonObjectBuilder;
+import jakarta.json.JsonArrayBuilder;
+import jakarta.json.JsonObject;
+
 import java.io.File;
 import java.io.IOException;
 import java.nio.charset.StandardCharsets;
@@ -20,12 +18,12 @@
 import java.time.LocalDateTime;
 import java.util.logging.Level;
 import edu.harvard.iq.dataverse.api.datadeposit.SwordConfigurationImpl;
-import com.jayway.restassured.path.xml.XmlPath;
+import io.restassured.path.xml.XmlPath;
 import edu.harvard.iq.dataverse.mydata.MyDataFilterParams;
 import org.apache.commons.lang3.StringUtils;
-import org.junit.Test;
+import org.junit.jupiter.api.Test;
 import edu.harvard.iq.dataverse.settings.SettingsServiceBean;
-import com.jayway.restassured.specification.RequestSpecification;
+import io.restassured.specification.RequestSpecification;
 import java.util.List;
 import com.mashape.unirest.http.Unirest;
 import com.mashape.unirest.http.exceptions.UnirestException;
@@ -41,19 +39,19 @@
 import org.hamcrest.Description;
 import org.hamcrest.Matcher;
 
-import static com.jayway.restassured.RestAssured.put;
-import static com.jayway.restassured.path.xml.XmlPath.from;
-import static com.jayway.restassured.RestAssured.given;
+import static io.restassured.RestAssured.put;
+import static io.restassured.path.xml.XmlPath.from;
+import static io.restassured.RestAssured.given;
 import edu.harvard.iq.dataverse.DatasetField;
-import edu.harvard.iq.dataverse.DatasetFieldConstant;
 import edu.harvard.iq.dataverse.DatasetFieldType;
 import edu.harvard.iq.dataverse.DatasetFieldValue;
 import edu.harvard.iq.dataverse.util.StringUtil;
+
 import java.io.StringReader;
 import java.util.Collections;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertNotNull;
-import static org.junit.Assert.assertNull;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertNotNull;
+import static org.junit.jupiter.api.Assertions.assertNull;
 
 public class UtilIT {
 
@@ -67,6 +65,7 @@ public class UtilIT {
     private static final String EMPTY_STRING = "";
     public static final int MAXIMUM_INGEST_LOCK_DURATION = 15;
     public static final int MAXIMUM_PUBLISH_LOCK_DURATION = 15;
+    public static final int MAXIMUM_IMPORT_DURATION = 1;
     
     private static SwordConfigurationImpl swordConfiguration = new SwordConfigurationImpl();
     
@@ -564,7 +563,14 @@ static Response updateDatasetPIDMetadata(String persistentId,  String apiToken)
                 .post("/api/datasets/:persistentId/modifyRegistrationMetadata/?persistentId=" + persistentId);
         return response;
     }
-    
+
+    /**
+     * Deprecated because once there are new fields in the database that Solr
+     * doesn't know about, dataset creation could be prevented, or at least
+     * subsequent search operations could fail because the dataset can't be
+     * indexed.
+     */
+    @Deprecated
     static Response loadMetadataBlock(String apiToken, byte[] body) {
         return given()
           .header(API_TOKEN_HTTP_HEADER, apiToken)
@@ -1247,6 +1253,12 @@ static Response destroyDataset(Integer datasetId, String apiToken) {
                 .delete("/api/datasets/" + datasetId + "/destroy");
     }
 
+    static Response destroyDataset(String pid, String apiToken) {
+        return given()
+                .header(API_TOKEN_HTTP_HEADER, apiToken)
+                .delete("/api/datasets/:persistentId/destroy?persistentId=" + pid);
+    }
+
     static Response deleteFile(Integer fileId, String apiToken) {
         return given()
                 .auth().basic(apiToken, EMPTY_STRING)
@@ -2577,9 +2589,24 @@ static boolean sleepForReexport(String idOrPersistentId, String apiToken, int du
         return i <= repeats;
 
     }
-    
-    
-    
+
+    // Modeled after sleepForLock but the dataset isn't locked.
+    // We have to sleep or we can't perform the next operation.
+    static Boolean sleepForDeadlock(int duration) {
+        int i = 0;
+        do {
+            try {
+                Thread.sleep(1000);
+                i++;
+                if (i > duration) {
+                    break;
+                }
+            } catch (InterruptedException ex) {
+                Logger.getLogger(UtilIT.class.getName()).log(Level.SEVERE, null, ex);
+            }
+        } while (true);
+        return i <= duration;
+    }
     
     //Helper function that returns true if a given search returns a non-zero response within a fixed time limit
     // a given duration returns false if still zero results after given duration
diff --git a/src/test/java/edu/harvard/iq/dataverse/api/auth/ApiKeyAuthMechanismTest.java b/src/test/java/edu/harvard/iq/dataverse/api/auth/ApiKeyAuthMechanismTest.java
index 5c252f67f0c..486697664e6 100644
--- a/src/test/java/edu/harvard/iq/dataverse/api/auth/ApiKeyAuthMechanismTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/api/auth/ApiKeyAuthMechanismTest.java
@@ -11,7 +11,7 @@
 import org.junit.jupiter.api.Test;
 import org.mockito.Mockito;
 
-import javax.ws.rs.container.ContainerRequestContext;
+import jakarta.ws.rs.container.ContainerRequestContext;
 
 import static edu.harvard.iq.dataverse.api.auth.ApiKeyAuthMechanism.ACCESS_DATAFILE_PATH_PREFIX;
 import static edu.harvard.iq.dataverse.api.auth.ApiKeyAuthMechanism.RESPONSE_MESSAGE_BAD_API_KEY;
diff --git a/src/test/java/edu/harvard/iq/dataverse/api/auth/BearerTokenAuthMechanismTest.java b/src/test/java/edu/harvard/iq/dataverse/api/auth/BearerTokenAuthMechanismTest.java
index 0370daa5ea2..6eaf769e0e8 100644
--- a/src/test/java/edu/harvard/iq/dataverse/api/auth/BearerTokenAuthMechanismTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/api/auth/BearerTokenAuthMechanismTest.java
@@ -16,7 +16,7 @@
 import org.junit.jupiter.api.Test;
 import org.mockito.Mockito;
 
-import javax.ws.rs.container.ContainerRequestContext;
+import jakarta.ws.rs.container.ContainerRequestContext;
 
 import java.io.IOException;
 import java.util.Collections;
diff --git a/src/test/java/edu/harvard/iq/dataverse/api/auth/CompoundAuthMechanismTest.java b/src/test/java/edu/harvard/iq/dataverse/api/auth/CompoundAuthMechanismTest.java
index c09917c55c6..b3435d53ca2 100644
--- a/src/test/java/edu/harvard/iq/dataverse/api/auth/CompoundAuthMechanismTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/api/auth/CompoundAuthMechanismTest.java
@@ -7,7 +7,7 @@
 import org.junit.jupiter.api.Test;
 import org.mockito.Mockito;
 
-import javax.ws.rs.container.ContainerRequestContext;
+import jakarta.ws.rs.container.ContainerRequestContext;
 
 import static org.hamcrest.MatcherAssert.assertThat;
 import static org.hamcrest.Matchers.equalTo;
diff --git a/src/test/java/edu/harvard/iq/dataverse/api/auth/SignedUrlAuthMechanismTest.java b/src/test/java/edu/harvard/iq/dataverse/api/auth/SignedUrlAuthMechanismTest.java
index 8518d49c687..74db6e544da 100644
--- a/src/test/java/edu/harvard/iq/dataverse/api/auth/SignedUrlAuthMechanismTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/api/auth/SignedUrlAuthMechanismTest.java
@@ -9,7 +9,7 @@
 import org.junit.jupiter.api.Test;
 import org.mockito.Mockito;
 
-import javax.ws.rs.container.ContainerRequestContext;
+import jakarta.ws.rs.container.ContainerRequestContext;
 
 import static edu.harvard.iq.dataverse.api.auth.SignedUrlAuthMechanism.RESPONSE_MESSAGE_BAD_SIGNED_URL;
 import static org.junit.jupiter.api.Assertions.*;
diff --git a/src/test/java/edu/harvard/iq/dataverse/api/auth/WorkflowKeyAuthMechanismTest.java b/src/test/java/edu/harvard/iq/dataverse/api/auth/WorkflowKeyAuthMechanismTest.java
index 712323a6d5c..3f90fa73fa9 100644
--- a/src/test/java/edu/harvard/iq/dataverse/api/auth/WorkflowKeyAuthMechanismTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/api/auth/WorkflowKeyAuthMechanismTest.java
@@ -8,7 +8,7 @@
 import org.junit.jupiter.api.Test;
 import org.mockito.Mockito;
 
-import javax.ws.rs.container.ContainerRequestContext;
+import jakarta.ws.rs.container.ContainerRequestContext;
 
 import static edu.harvard.iq.dataverse.api.auth.WorkflowKeyAuthMechanism.RESPONSE_MESSAGE_BAD_WORKFLOW_KEY;
 import static org.junit.jupiter.api.Assertions.*;
diff --git a/src/test/java/edu/harvard/iq/dataverse/api/auth/doubles/ApiKeyContainerRequestTestFake.java b/src/test/java/edu/harvard/iq/dataverse/api/auth/doubles/ApiKeyContainerRequestTestFake.java
index 6046b217c8b..3afa1a06be3 100644
--- a/src/test/java/edu/harvard/iq/dataverse/api/auth/doubles/ApiKeyContainerRequestTestFake.java
+++ b/src/test/java/edu/harvard/iq/dataverse/api/auth/doubles/ApiKeyContainerRequestTestFake.java
@@ -1,6 +1,6 @@
 package edu.harvard.iq.dataverse.api.auth.doubles;
 
-import javax.ws.rs.core.UriInfo;
+import jakarta.ws.rs.core.UriInfo;
 
 import static edu.harvard.iq.dataverse.api.auth.ApiKeyAuthMechanism.DATAVERSE_API_KEY_REQUEST_HEADER_NAME;
 
diff --git a/src/test/java/edu/harvard/iq/dataverse/api/auth/doubles/ApiKeyUriInfoTestFake.java b/src/test/java/edu/harvard/iq/dataverse/api/auth/doubles/ApiKeyUriInfoTestFake.java
index 8761bd25f4b..495d3810adb 100644
--- a/src/test/java/edu/harvard/iq/dataverse/api/auth/doubles/ApiKeyUriInfoTestFake.java
+++ b/src/test/java/edu/harvard/iq/dataverse/api/auth/doubles/ApiKeyUriInfoTestFake.java
@@ -1,7 +1,7 @@
 package edu.harvard.iq.dataverse.api.auth.doubles;
 
-import javax.ws.rs.core.MultivaluedHashMap;
-import javax.ws.rs.core.MultivaluedMap;
+import jakarta.ws.rs.core.MultivaluedHashMap;
+import jakarta.ws.rs.core.MultivaluedMap;
 
 import static edu.harvard.iq.dataverse.api.auth.ApiKeyAuthMechanism.DATAVERSE_API_KEY_REQUEST_PARAM_NAME;
 
diff --git a/src/test/java/edu/harvard/iq/dataverse/api/auth/doubles/BearerTokenKeyContainerRequestTestFake.java b/src/test/java/edu/harvard/iq/dataverse/api/auth/doubles/BearerTokenKeyContainerRequestTestFake.java
index 132c165696d..04a66e851c5 100644
--- a/src/test/java/edu/harvard/iq/dataverse/api/auth/doubles/BearerTokenKeyContainerRequestTestFake.java
+++ b/src/test/java/edu/harvard/iq/dataverse/api/auth/doubles/BearerTokenKeyContainerRequestTestFake.java
@@ -1,6 +1,6 @@
 package edu.harvard.iq.dataverse.api.auth.doubles;
 
-import javax.ws.rs.core.HttpHeaders;
+import jakarta.ws.rs.core.HttpHeaders;
 
 public class BearerTokenKeyContainerRequestTestFake extends ContainerRequestTestFake {
 
diff --git a/src/test/java/edu/harvard/iq/dataverse/api/auth/doubles/ContainerRequestTestFake.java b/src/test/java/edu/harvard/iq/dataverse/api/auth/doubles/ContainerRequestTestFake.java
index 10a33cc5d15..74f2e9dbb41 100644
--- a/src/test/java/edu/harvard/iq/dataverse/api/auth/doubles/ContainerRequestTestFake.java
+++ b/src/test/java/edu/harvard/iq/dataverse/api/auth/doubles/ContainerRequestTestFake.java
@@ -1,7 +1,7 @@
 package edu.harvard.iq.dataverse.api.auth.doubles;
 
-import javax.ws.rs.container.ContainerRequestContext;
-import javax.ws.rs.core.*;
+import jakarta.ws.rs.container.ContainerRequestContext;
+import jakarta.ws.rs.core.*;
 import java.io.InputStream;
 import java.net.URI;
 import java.util.*;
diff --git a/src/test/java/edu/harvard/iq/dataverse/api/auth/doubles/SignedUrlContainerRequestTestFake.java b/src/test/java/edu/harvard/iq/dataverse/api/auth/doubles/SignedUrlContainerRequestTestFake.java
index 01392b91cf3..df37f6723d3 100644
--- a/src/test/java/edu/harvard/iq/dataverse/api/auth/doubles/SignedUrlContainerRequestTestFake.java
+++ b/src/test/java/edu/harvard/iq/dataverse/api/auth/doubles/SignedUrlContainerRequestTestFake.java
@@ -1,6 +1,6 @@
 package edu.harvard.iq.dataverse.api.auth.doubles;
 
-import javax.ws.rs.core.UriInfo;
+import jakarta.ws.rs.core.UriInfo;
 
 public class SignedUrlContainerRequestTestFake extends ContainerRequestTestFake {
     private final UriInfo uriInfo;
diff --git a/src/test/java/edu/harvard/iq/dataverse/api/auth/doubles/SignedUrlUriInfoTestFake.java b/src/test/java/edu/harvard/iq/dataverse/api/auth/doubles/SignedUrlUriInfoTestFake.java
index 7b19325de42..fa9da7fc8de 100644
--- a/src/test/java/edu/harvard/iq/dataverse/api/auth/doubles/SignedUrlUriInfoTestFake.java
+++ b/src/test/java/edu/harvard/iq/dataverse/api/auth/doubles/SignedUrlUriInfoTestFake.java
@@ -2,14 +2,14 @@
 
 import edu.harvard.iq.dataverse.util.UrlSignerUtil;
 
-import javax.ws.rs.core.MultivaluedHashMap;
-import javax.ws.rs.core.MultivaluedMap;
+import jakarta.ws.rs.core.MultivaluedHashMap;
+import jakarta.ws.rs.core.MultivaluedMap;
 
 import java.net.URI;
 
 import static edu.harvard.iq.dataverse.util.UrlSignerUtil.SIGNED_URL_TOKEN;
 import static edu.harvard.iq.dataverse.util.UrlSignerUtil.SIGNED_URL_USER;
-import static javax.ws.rs.HttpMethod.GET;
+import static jakarta.ws.rs.HttpMethod.GET;
 
 public class SignedUrlUriInfoTestFake extends UriInfoTestFake {
 
diff --git a/src/test/java/edu/harvard/iq/dataverse/api/auth/doubles/UriInfoTestFake.java b/src/test/java/edu/harvard/iq/dataverse/api/auth/doubles/UriInfoTestFake.java
index ba7bffbb5d0..51d20083ec8 100644
--- a/src/test/java/edu/harvard/iq/dataverse/api/auth/doubles/UriInfoTestFake.java
+++ b/src/test/java/edu/harvard/iq/dataverse/api/auth/doubles/UriInfoTestFake.java
@@ -1,9 +1,9 @@
 package edu.harvard.iq.dataverse.api.auth.doubles;
 
-import javax.ws.rs.core.MultivaluedMap;
-import javax.ws.rs.core.PathSegment;
-import javax.ws.rs.core.UriBuilder;
-import javax.ws.rs.core.UriInfo;
+import jakarta.ws.rs.core.MultivaluedMap;
+import jakarta.ws.rs.core.PathSegment;
+import jakarta.ws.rs.core.UriBuilder;
+import jakarta.ws.rs.core.UriInfo;
 import java.net.URI;
 import java.util.List;
 
diff --git a/src/test/java/edu/harvard/iq/dataverse/api/auth/doubles/WorkflowKeyContainerRequestTestFake.java b/src/test/java/edu/harvard/iq/dataverse/api/auth/doubles/WorkflowKeyContainerRequestTestFake.java
index b726edbd3da..2679ab1cc1d 100644
--- a/src/test/java/edu/harvard/iq/dataverse/api/auth/doubles/WorkflowKeyContainerRequestTestFake.java
+++ b/src/test/java/edu/harvard/iq/dataverse/api/auth/doubles/WorkflowKeyContainerRequestTestFake.java
@@ -1,6 +1,6 @@
 package edu.harvard.iq.dataverse.api.auth.doubles;
 
-import javax.ws.rs.core.UriInfo;
+import jakarta.ws.rs.core.UriInfo;
 
 import static edu.harvard.iq.dataverse.api.auth.WorkflowKeyAuthMechanism.DATAVERSE_WORKFLOW_KEY_REQUEST_HEADER_NAME;
 
diff --git a/src/test/java/edu/harvard/iq/dataverse/api/auth/doubles/WorkflowKeyUriInfoTestFake.java b/src/test/java/edu/harvard/iq/dataverse/api/auth/doubles/WorkflowKeyUriInfoTestFake.java
index 73d55fe45bc..7bfd4326417 100644
--- a/src/test/java/edu/harvard/iq/dataverse/api/auth/doubles/WorkflowKeyUriInfoTestFake.java
+++ b/src/test/java/edu/harvard/iq/dataverse/api/auth/doubles/WorkflowKeyUriInfoTestFake.java
@@ -1,7 +1,7 @@
 package edu.harvard.iq.dataverse.api.auth.doubles;
 
-import javax.ws.rs.core.MultivaluedHashMap;
-import javax.ws.rs.core.MultivaluedMap;
+import jakarta.ws.rs.core.MultivaluedHashMap;
+import jakarta.ws.rs.core.MultivaluedMap;
 
 import static edu.harvard.iq.dataverse.api.auth.WorkflowKeyAuthMechanism.DATAVERSE_WORKFLOW_KEY_REQUEST_PARAM_NAME;
 
diff --git a/src/test/java/edu/harvard/iq/dataverse/api/dto/FieldDTOTest.java b/src/test/java/edu/harvard/iq/dataverse/api/dto/FieldDTOTest.java
index 45f0ef52b33..0202f11d469 100644
--- a/src/test/java/edu/harvard/iq/dataverse/api/dto/FieldDTOTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/api/dto/FieldDTOTest.java
@@ -11,12 +11,12 @@
 import java.util.HashSet;
 import java.util.List;
 import java.util.Set;
-import junit.framework.Assert;
-import org.junit.After;
-import org.junit.AfterClass;
-import org.junit.Before;
-import org.junit.BeforeClass;
-import org.junit.Test;
+import org.junit.jupiter.api.AfterAll;
+import org.junit.jupiter.api.AfterEach;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
+import static org.junit.jupiter.api.Assertions.assertEquals;
 
 /**
  *
@@ -27,15 +27,15 @@ public class FieldDTOTest {
     public FieldDTOTest() {
     }
     
-    @BeforeClass
+    @BeforeAll
     public static void setUpClass() {
     }
     
-    @AfterClass
+    @AfterAll
     public static void tearDownClass() {
     }
     
-    @Before
+    @BeforeEach
     public void setUp() {
        
         Set<FieldDTO> authorFields = new HashSet<>();
@@ -49,7 +49,7 @@ public void setUp() {
         
     }
     
-    @After
+    @AfterEach
     public void tearDown() {
     }
 
@@ -60,7 +60,7 @@ public void tearDown() {
     public void testSinglePrimitive() {
         FieldDTO affil = FieldDTO.createPrimitiveFieldDTO("authorAffiliation", "Top");
         System.out.println(affil.getSinglePrimitive());
-        Assert.assertEquals("Top", affil.getSinglePrimitive());
+        assertEquals("Top", affil.getSinglePrimitive());
         
     }
 
@@ -78,10 +78,10 @@ public void testMultipleVocab() {
         value.add("EventList");
         astroType.setMultipleVocab(value);
         
-        Assert.assertEquals(value, astroType.getMultipleVocab());
+        assertEquals(value, astroType.getMultipleVocab());
         String jsonStr = gson.toJson(astroType);
         FieldDTO astroType2 = gson.fromJson(jsonStr, FieldDTO.class);
-        Assert.assertEquals(astroType, astroType2);
+        assertEquals(astroType, astroType2);
         
     }
 
@@ -116,7 +116,7 @@ public void testSetMultipleCompound() {
         compoundField.setTypeName("author");
         compoundField.setMultipleCompound(authorList);
         
-        Assert.assertEquals(compoundField.getMultipleCompound(), authorList);
+        assertEquals(compoundField.getMultipleCompound(), authorList);
     }
 
     /**
@@ -132,8 +132,8 @@ public void testSetSingleCompound() {
         
         FieldDTO compoundField = new FieldDTO();
         compoundField.setSingleCompound(authorFields.toArray(new FieldDTO[]{}));
-        Set<FieldDTO>  returned = compoundField.getSingleCompound();   
-        Assert.assertTrue(returned.equals(authorFields));
+        Set<FieldDTO>  returned = compoundField.getSingleCompound();
+        assertEquals(returned, authorFields);
        
     }
 
diff --git a/src/test/java/edu/harvard/iq/dataverse/api/filesystem/FileRecordJobIT.java b/src/test/java/edu/harvard/iq/dataverse/api/filesystem/FileRecordJobIT.java
index 7b9f7d5c155..3257204f460 100644
--- a/src/test/java/edu/harvard/iq/dataverse/api/filesystem/FileRecordJobIT.java
+++ b/src/test/java/edu/harvard/iq/dataverse/api/filesystem/FileRecordJobIT.java
@@ -20,24 +20,25 @@
 */
 
 import com.fasterxml.jackson.databind.ObjectMapper;
-import com.jayway.restassured.RestAssured;
-import com.jayway.restassured.http.ContentType;
-import com.jayway.restassured.path.json.JsonPath;
-import com.jayway.restassured.response.Response;
+import io.restassured.RestAssured;
+import io.restassured.http.ContentType;
+import io.restassured.path.json.JsonPath;
+import io.restassured.response.Response;
 import edu.harvard.iq.dataverse.api.UtilIT;
 import edu.harvard.iq.dataverse.authorization.DataverseRole;
 import edu.harvard.iq.dataverse.batch.entities.JobExecutionEntity;
 import edu.harvard.iq.dataverse.batch.entities.StepExecutionEntity;
 import org.apache.commons.io.FileUtils;
 import org.apache.commons.io.IOUtils;
-import org.junit.After;
-import org.junit.AfterClass;
-import org.junit.Before;
-import org.junit.BeforeClass;
-import org.junit.Test;
-import org.junit.Ignore;
-
-import javax.batch.runtime.BatchStatus;
+import org.junit.jupiter.api.AfterAll;
+import org.junit.jupiter.api.AfterEach;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Disabled;
+import org.junit.jupiter.api.Test;
+
+import jakarta.batch.runtime.BatchStatus;
+
 import java.io.BufferedWriter;
 import java.io.File;
 import java.io.FileInputStream;
@@ -55,11 +56,11 @@
 import java.util.Random;
 import java.util.UUID;
 
-import static com.jayway.restassured.RestAssured.given;
-import static junit.framework.Assert.assertEquals;
-import static junit.framework.Assert.assertTrue;
-import static junit.framework.Assert.fail;
+import static io.restassured.RestAssured.given;
+import static org.junit.jupiter.api.Assertions.assertEquals;
 import static org.hamcrest.Matchers.equalTo;
+import static org.junit.jupiter.api.Assertions.assertTrue;
+import static org.junit.jupiter.api.Assertions.fail;
 
 /**
  * Batch File System Import Job Integration Tests
@@ -92,7 +93,7 @@ public class FileRecordJobIT {
     private static final String API_TOKEN_HTTP_HEADER = "X-Dataverse-key";
     private static final String BUILTIN_USER_KEY = "burrito";
 
-    @BeforeClass
+    @BeforeAll
     public static void setUpClass() throws Exception {
 
         // this allows for testing on dataverse staging servers via jvm setting
@@ -122,7 +123,7 @@ public static void setUpClass() throws Exception {
         }
     }
 
-    @Before
+    @BeforeEach
     public void setUpDataverse() {
 
         try {
@@ -189,12 +190,12 @@ public void setUpDataverse() {
         }
     }
 
-    @AfterClass
+    @AfterAll
     public static void tearDownClass() {
         RestAssured.reset();
     }
 
-    @After
+    @AfterEach
     public void tearDownDataverse() {
         try {
 
@@ -231,7 +232,7 @@ public void tearDownDataverse() {
      * Ignores failed checksum manifest import.
      */
     @Test
-    @Ignore
+    @Disabled
     public void testSameFileInDifferentDirectories() {
 
         try {
@@ -310,7 +311,7 @@ public void testSameFileInDifferentDirectories() {
     }
 
     @Test
-    @Ignore
+    @Disabled
     public void testNewEditor() {
 
         try {
@@ -414,7 +415,7 @@ public void testNewEditor() {
      * Ignores failed checksum manifest import.
      */
     @Test
-    @Ignore
+    @Disabled
     public void testSameFileInDifferentDirectoriesUnauthorizedUser() {
 
         try {
@@ -613,7 +614,7 @@ public void testSameFileInDifferentDirectoriesUnauthorizedUser() {
 //    }
 
     @Test
-    @Ignore
+    @Disabled
     /**
      * Add a file in MERGE mode (default), should only need to commit the new file
      */
@@ -758,7 +759,7 @@ public void testAddingFilesInMergeMode() {
     }
 
     @Test
-    @Ignore
+    @Disabled
     /**
      * The success case: all files uploaded and present in checksum manifest
      */
@@ -827,7 +828,7 @@ public void testFilesWithChecksumManifest() {
     }
 
     @Test
-    @Ignore
+    @Disabled
     /**
      * No checksum manifest found
      */
@@ -881,7 +882,7 @@ public void testFilesWithoutChecksumManifest() {
     }
 
     @Test
-    @Ignore
+    @Disabled
     /**
      * Checksum manifest is missing an uploaded file
      */
@@ -948,7 +949,7 @@ public void testFileMissingInChecksumManifest() {
     }
 
     @Test
-    @Ignore
+    @Disabled
     /**
      * Checksum manifest references a file that isn't present, it should return failed status and detailed 
      * message in persistentUserData
@@ -1020,7 +1021,7 @@ public void testFileInChecksumManifestDoesntExist() {
     }
 
     @Test
-    @Ignore
+    @Disabled
     /**
      * Published datasets should not allow import jobs for now since it isn't in DRAFT mode
      */
@@ -1102,7 +1103,7 @@ public void testPublishedDataset() {
 //    }
 
     @Test
-    @Ignore
+    @Disabled
     /**
      * No dataset found responses (bad dataset id, etc.)
      */
diff --git a/src/test/java/edu/harvard/iq/dataverse/api/imports/ImportGenericServiceBeanTest.java b/src/test/java/edu/harvard/iq/dataverse/api/imports/ImportGenericServiceBeanTest.java
index 70c53c8c9b9..44739f3f62a 100644
--- a/src/test/java/edu/harvard/iq/dataverse/api/imports/ImportGenericServiceBeanTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/api/imports/ImportGenericServiceBeanTest.java
@@ -1,13 +1,14 @@
 package edu.harvard.iq.dataverse.api.imports;
 
 import edu.harvard.iq.dataverse.api.dto.DatasetDTO;
-import org.junit.Assert;
-import org.junit.Test;
-import org.junit.runner.RunWith;
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.extension.ExtendWith;
 import org.mockito.InjectMocks;
-import org.mockito.junit.MockitoJUnitRunner;
+import org.mockito.junit.jupiter.MockitoExtension;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertNull;
 
-@RunWith(MockitoJUnitRunner.class)
+@ExtendWith(MockitoExtension.class)
 public class ImportGenericServiceBeanTest {
 
     @InjectMocks
@@ -16,18 +17,18 @@ public class ImportGenericServiceBeanTest {
     @Test
     public void testReassignIdentifierAsGlobalId() {
         // non-URL
-        Assert.assertEquals("doi:10.7910/DVN/TJCLKP", importGenericService.reassignIdentifierAsGlobalId("doi:10.7910/DVN/TJCLKP", new DatasetDTO()));
-        Assert.assertEquals("hdl:10.7910/DVN/TJCLKP", importGenericService.reassignIdentifierAsGlobalId("hdl:10.7910/DVN/TJCLKP", new DatasetDTO()));
+        assertEquals("doi:10.7910/DVN/TJCLKP", importGenericService.reassignIdentifierAsGlobalId("doi:10.7910/DVN/TJCLKP", new DatasetDTO()));
+        assertEquals("hdl:10.7910/DVN/TJCLKP", importGenericService.reassignIdentifierAsGlobalId("hdl:10.7910/DVN/TJCLKP", new DatasetDTO()));
         // HTTPS
-        Assert.assertEquals("doi:10.7910/DVN/TJCLKP", importGenericService.reassignIdentifierAsGlobalId("https://doi.org/10.7910/DVN/TJCLKP", new DatasetDTO()));
-        Assert.assertEquals("doi:10.7910/DVN/TJCLKP", importGenericService.reassignIdentifierAsGlobalId("https://dx.doi.org/10.7910/DVN/TJCLKP", new DatasetDTO()));
-        Assert.assertEquals("hdl:10.7910/DVN/TJCLKP", importGenericService.reassignIdentifierAsGlobalId("https://hdl.handle.net/10.7910/DVN/TJCLKP", new DatasetDTO()));
+        assertEquals("doi:10.7910/DVN/TJCLKP", importGenericService.reassignIdentifierAsGlobalId("https://doi.org/10.7910/DVN/TJCLKP", new DatasetDTO()));
+        assertEquals("doi:10.7910/DVN/TJCLKP", importGenericService.reassignIdentifierAsGlobalId("https://dx.doi.org/10.7910/DVN/TJCLKP", new DatasetDTO()));
+        assertEquals("hdl:10.7910/DVN/TJCLKP", importGenericService.reassignIdentifierAsGlobalId("https://hdl.handle.net/10.7910/DVN/TJCLKP", new DatasetDTO()));
         // HTTP (no S)
-        Assert.assertEquals("doi:10.7910/DVN/TJCLKP", importGenericService.reassignIdentifierAsGlobalId("http://doi.org/10.7910/DVN/TJCLKP", new DatasetDTO()));
-        Assert.assertEquals("doi:10.7910/DVN/TJCLKP", importGenericService.reassignIdentifierAsGlobalId("http://dx.doi.org/10.7910/DVN/TJCLKP", new DatasetDTO()));
-        Assert.assertEquals("hdl:10.7910/DVN/TJCLKP", importGenericService.reassignIdentifierAsGlobalId("http://hdl.handle.net/10.7910/DVN/TJCLKP", new DatasetDTO()));
+        assertEquals("doi:10.7910/DVN/TJCLKP", importGenericService.reassignIdentifierAsGlobalId("http://doi.org/10.7910/DVN/TJCLKP", new DatasetDTO()));
+        assertEquals("doi:10.7910/DVN/TJCLKP", importGenericService.reassignIdentifierAsGlobalId("http://dx.doi.org/10.7910/DVN/TJCLKP", new DatasetDTO()));
+        assertEquals("hdl:10.7910/DVN/TJCLKP", importGenericService.reassignIdentifierAsGlobalId("http://hdl.handle.net/10.7910/DVN/TJCLKP", new DatasetDTO()));
         // junk
-        Assert.assertEquals(null, importGenericService.reassignIdentifierAsGlobalId("junk", new DatasetDTO()));
+        assertNull(importGenericService.reassignIdentifierAsGlobalId("junk", new DatasetDTO()));
     }
 
 }
diff --git a/src/test/java/edu/harvard/iq/dataverse/api/util/JsonResponseBuilderTest.java b/src/test/java/edu/harvard/iq/dataverse/api/util/JsonResponseBuilderTest.java
index a6da689da7a..51586127041 100644
--- a/src/test/java/edu/harvard/iq/dataverse/api/util/JsonResponseBuilderTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/api/util/JsonResponseBuilderTest.java
@@ -7,7 +7,7 @@
 import org.junit.jupiter.params.provider.ValueSource;
 import org.mockito.Mockito;
 
-import javax.servlet.http.HttpServletRequest;
+import jakarta.servlet.http.HttpServletRequest;
 
 import static org.junit.jupiter.api.Assertions.*;
 import static org.mockito.Mockito.*;
diff --git a/src/test/java/edu/harvard/iq/dataverse/authorization/AuthUtilTest.java b/src/test/java/edu/harvard/iq/dataverse/authorization/AuthUtilTest.java
index a7a33d0c1bd..74f6d714a5e 100644
--- a/src/test/java/edu/harvard/iq/dataverse/authorization/AuthUtilTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/authorization/AuthUtilTest.java
@@ -5,83 +5,57 @@
 import edu.harvard.iq.dataverse.authorization.providers.oauth2.impl.GoogleOAuth2AP;
 import edu.harvard.iq.dataverse.authorization.providers.oauth2.impl.OrcidOAuth2AP;
 import edu.harvard.iq.dataverse.authorization.providers.shib.ShibAuthenticationProvider;
-import java.util.Arrays;
 import java.util.Collection;
 import java.util.HashSet;
 
-import org.junit.Test;
-import org.junit.experimental.runners.Enclosed;
-import org.junit.runner.RunWith;
-import org.junit.runners.Parameterized;
-import org.junit.runners.Parameterized.Parameter;
-import org.junit.runners.Parameterized.Parameters;
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.params.ParameterizedTest;
+import org.junit.jupiter.params.provider.CsvSource;
 
-import static org.junit.Assert.*;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertFalse;
+import static org.junit.jupiter.api.Assertions.assertTrue;
 
-@RunWith(Enclosed.class)
 public class AuthUtilTest {
-
-    @RunWith(Parameterized.class)
-    public static class AuthUtilParamTests {
-
-        @Parameters
-        public static Collection<String[]> data() {
-            return Arrays.asList(
-                    new String[][] {
-                        { null, null, null },
-                        { "Homer", "Homer", null },
-                        { "Simpson", null, "Simpson" },
-                        { "Homer Simpson", "Homer", "Simpson" },
-                        { "Homer Simpson", " Homer", "Simpson" }
-                    }
-                );
-        }
-
-        @Parameter
-        public String expectedDisplayName;
-
-        @Parameter(1)
-        public String displayFirst;
-
-        @Parameter(2)
-        public String displayLast;
-
-        @Test
-        public void testGetDisplayName() {
-            assertEquals(expectedDisplayName, AuthUtil.getDisplayName(displayFirst, displayLast));
-        }
+    
+    @ParameterizedTest
+    @CsvSource(value = {
+        "NULL,NULL,NULL",
+        "Homer,Homer,NULL",
+        "Simpson,NULL,Simpson",
+        "Homer Simpson,Homer,Simpson",
+        "Homer Simpson,Homer,Simpson"
+    }, nullValues = "NULL")
+    void testGetDisplayName(String expectedDisplayName, String displayFirst, String displayLast) {
+        assertEquals(expectedDisplayName, AuthUtil.getDisplayName(displayFirst, displayLast));
     }
-
-    public static class AuthUtilNoParamTests {
-
-        /**
-         * Test of isNonLocalLoginEnabled method, of class AuthUtil.
-         */
-        @Test
-        public void testIsNonLocalLoginEnabled() {
-            System.out.println("isNonLocalLoginEnabled");
-
-            AuthUtil authUtil = new AuthUtil();
-
-            assertEquals(false, AuthUtil.isNonLocalLoginEnabled(null));
-
-            Collection<AuthenticationProvider> shibOnly = new HashSet<>();
-            shibOnly.add(new ShibAuthenticationProvider());
-            assertEquals(true, AuthUtil.isNonLocalLoginEnabled(shibOnly));
-
-            Collection<AuthenticationProvider> manyNonLocal = new HashSet<>();
-            manyNonLocal.add(new ShibAuthenticationProvider());
-            manyNonLocal.add(new GitHubOAuth2AP(null, null));
-            manyNonLocal.add(new GoogleOAuth2AP(null, null));
-            manyNonLocal.add(new OrcidOAuth2AP(null, null, null));
-            assertEquals(true, AuthUtil.isNonLocalLoginEnabled(manyNonLocal));
-
-            Collection<AuthenticationProvider> onlyBuiltin = new HashSet<>();
-            onlyBuiltin.add(new BuiltinAuthenticationProvider(null, null, null));
-            // only builtin provider
-            assertEquals(false, AuthUtil.isNonLocalLoginEnabled(onlyBuiltin));
-
-        }
+    
+    /**
+     * Test of isNonLocalLoginEnabled method, of class AuthUtil.
+     */
+    @Test
+    public void testIsNonLocalLoginEnabled() {
+        System.out.println("isNonLocalLoginEnabled");
+        
+        AuthUtil authUtil = new AuthUtil();
+        
+        assertFalse(AuthUtil.isNonLocalLoginEnabled(null));
+        
+        Collection<AuthenticationProvider> shibOnly = new HashSet<>();
+        shibOnly.add(new ShibAuthenticationProvider());
+        assertTrue(AuthUtil.isNonLocalLoginEnabled(shibOnly));
+        
+        Collection<AuthenticationProvider> manyNonLocal = new HashSet<>();
+        manyNonLocal.add(new ShibAuthenticationProvider());
+        manyNonLocal.add(new GitHubOAuth2AP(null, null));
+        manyNonLocal.add(new GoogleOAuth2AP(null, null));
+        manyNonLocal.add(new OrcidOAuth2AP(null, null, null));
+        assertTrue(AuthUtil.isNonLocalLoginEnabled(manyNonLocal));
+        
+        Collection<AuthenticationProvider> onlyBuiltin = new HashSet<>();
+        onlyBuiltin.add(new BuiltinAuthenticationProvider(null, null, null));
+        // only builtin provider
+        assertFalse(AuthUtil.isNonLocalLoginEnabled(onlyBuiltin));
     }
 
 }
diff --git a/src/test/java/edu/harvard/iq/dataverse/authorization/AuthenticatedUserDisplayInfoTest.java b/src/test/java/edu/harvard/iq/dataverse/authorization/AuthenticatedUserDisplayInfoTest.java
index c22536e7616..4f04228df71 100644
--- a/src/test/java/edu/harvard/iq/dataverse/authorization/AuthenticatedUserDisplayInfoTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/authorization/AuthenticatedUserDisplayInfoTest.java
@@ -1,7 +1,7 @@
 package edu.harvard.iq.dataverse.authorization;
 
-import org.junit.Test;
-import static org.junit.Assert.*;
+import org.junit.jupiter.api.Test;
+import static org.junit.jupiter.api.Assertions.*;
 
 /**
  *
diff --git a/src/test/java/edu/harvard/iq/dataverse/authorization/groups/GroupServiceBeanTest.java b/src/test/java/edu/harvard/iq/dataverse/authorization/groups/GroupServiceBeanTest.java
index ea9f851f9ed..27927c33420 100644
--- a/src/test/java/edu/harvard/iq/dataverse/authorization/groups/GroupServiceBeanTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/authorization/groups/GroupServiceBeanTest.java
@@ -13,8 +13,9 @@
 import java.util.Set;
 import static java.util.stream.Collectors.toList;
 import java.util.stream.Stream;
-import static org.junit.Assert.assertEquals;
-import org.junit.Test;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import org.junit.jupiter.api.Test;
+
 import static edu.harvard.iq.dataverse.util.CollectionLiterals.*;
 
 /**
@@ -63,10 +64,10 @@ public void testFlattenGroupsCollection() throws GroupException {
                 
         List<Group> result = sut.flattenGroupsCollection(grps).collect(toList());
         
-        assertEquals( "Groups should appear only once", result.size(), new HashSet<>(result).size() );
+        assertEquals(result.size(), new HashSet<>(result).size(), "Groups should appear only once");
         
         grps.addAll( listOf(gAa, gAb, gAstar, AuthenticatedUsers.get()) );
-        assertEquals( "All groups should appear", grps, new HashSet<>(result) );
+        assertEquals(grps, new HashSet<>(result), "All groups should appear");
         
     }
     
diff --git a/src/test/java/edu/harvard/iq/dataverse/authorization/groups/GroupUtilTest.java b/src/test/java/edu/harvard/iq/dataverse/authorization/groups/GroupUtilTest.java
index fdfd8d4370c..41a494d5e55 100644
--- a/src/test/java/edu/harvard/iq/dataverse/authorization/groups/GroupUtilTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/authorization/groups/GroupUtilTest.java
@@ -5,8 +5,8 @@
 import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser;
 import java.util.LinkedHashSet;
 import java.util.Set;
-import static org.junit.Assert.assertEquals;
-import org.junit.Test;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import org.junit.jupiter.api.Test;
 
 public class GroupUtilTest {
 
diff --git a/src/test/java/edu/harvard/iq/dataverse/authorization/groups/impl/explicit/ExplicitGroupTest.java b/src/test/java/edu/harvard/iq/dataverse/authorization/groups/impl/explicit/ExplicitGroupTest.java
index 543d3ab1eeb..afa07be2e38 100644
--- a/src/test/java/edu/harvard/iq/dataverse/authorization/groups/impl/explicit/ExplicitGroupTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/authorization/groups/impl/explicit/ExplicitGroupTest.java
@@ -16,10 +16,10 @@
 import edu.harvard.iq.dataverse.engine.command.DataverseRequest;
 import edu.harvard.iq.dataverse.mocks.MockRoleAssigneeServiceBean;
 import static edu.harvard.iq.dataverse.mocks.MocksFactory.*;
-import static org.junit.Assert.assertFalse;
-import static org.junit.Assert.assertTrue;
-import static org.junit.Assert.fail;
-import org.junit.Test;
+import static org.junit.jupiter.api.Assertions.assertFalse;
+import static org.junit.jupiter.api.Assertions.assertThrows;
+import static org.junit.jupiter.api.Assertions.assertTrue;
+import org.junit.jupiter.api.Test;
 
 /**
  *
@@ -34,15 +34,14 @@ public class ExplicitGroupTest {
     public ExplicitGroupTest() {
     }
     
-    @Test( expected=GroupException.class )
+    @Test
     public void addGroupToSelf() throws Exception {
         ExplicitGroup sut = new ExplicitGroup();
         sut.setDisplayName("a group");
-        sut.add( sut );
-        fail("A group cannot be added to itself.");
+        assertThrows(GroupException.class, () -> sut.add( sut ), "A group cannot be added to itself.");
     }
     
-    @Test( expected=GroupException.class )
+    @Test
     public void addGroupToDescendant() throws GroupException{
         Dataverse dv = makeDataverse();
         ExplicitGroup root = new ExplicitGroup(prv);
@@ -60,11 +59,10 @@ public void addGroupToDescendant() throws GroupException{
         
         sub.add( subSub );
         root.add( sub );
-        subSub.add(root);
-        fail("A group cannot contain its parent");
+        assertThrows(GroupException.class, () -> subSub.add(root), "A group cannot contain its parent");
     }
     
-    @Test( expected=GroupException.class )
+    @Test
     public void addGroupToUnrealtedGroup() throws GroupException {
         Dataverse dv1 = makeDataverse();
         Dataverse dv2 = makeDataverse();
@@ -73,9 +71,8 @@ public void addGroupToUnrealtedGroup() throws GroupException {
         g1.setOwner(dv1);
         g2.setOwner(dv2);
         
-        g1.add(g2);
-        fail("An explicit group cannot contain an explicit group defined in "
-                + "a dataverse that's not an ancestor of that group's owner dataverse.");
+        assertThrows(GroupException.class, () -> g1.add(g2), "An explicit group cannot contain an" +
+            "explicit group defined in a dataverse that's not an ancestor of that group's owner dataverse.");
         
     }
     
diff --git a/src/test/java/edu/harvard/iq/dataverse/authorization/groups/impl/ipaddress/IpGroupTest.java b/src/test/java/edu/harvard/iq/dataverse/authorization/groups/impl/ipaddress/IpGroupTest.java
index b6a3b862435..aeea93ad29e 100644
--- a/src/test/java/edu/harvard/iq/dataverse/authorization/groups/impl/ipaddress/IpGroupTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/authorization/groups/impl/ipaddress/IpGroupTest.java
@@ -5,8 +5,9 @@
 import edu.harvard.iq.dataverse.authorization.users.GuestUser;
 import edu.harvard.iq.dataverse.engine.command.DataverseRequest;
 import edu.harvard.iq.dataverse.mocks.MocksFactory;
-import org.junit.Test;
-import static org.junit.Assert.*;
+import org.junit.jupiter.api.Test;
+
+import static org.junit.jupiter.api.Assertions.*;
 
 /**
  *
diff --git a/src/test/java/edu/harvard/iq/dataverse/authorization/groups/impl/ipaddress/ip/IPv4AddressTest.java b/src/test/java/edu/harvard/iq/dataverse/authorization/groups/impl/ipaddress/ip/IPv4AddressTest.java
index d03846a97b4..4683d66decd 100644
--- a/src/test/java/edu/harvard/iq/dataverse/authorization/groups/impl/ipaddress/ip/IPv4AddressTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/authorization/groups/impl/ipaddress/ip/IPv4AddressTest.java
@@ -1,10 +1,10 @@
 package edu.harvard.iq.dataverse.authorization.groups.impl.ipaddress.ip;
 
-import java.math.BigDecimal;
 import java.math.BigInteger;
 import java.util.Arrays;
-import org.junit.Test;
-import static org.junit.Assert.*;
+import org.junit.jupiter.api.Test;
+
+import static org.junit.jupiter.api.Assertions.*;
 
 /**
  *
@@ -22,9 +22,9 @@ public void testValueOf() {
         assertEquals( new IPv4Address(127,0,0,1), IPv4Address.valueOf("127.0.0.1") );
     }
     
-    @Test( expected=IllegalArgumentException.class )
-    public void testValueOf_bad() {
-        IPv4Address.valueOf("1.2.3");
+    @Test
+    void testValueOf_bad() {
+        assertThrows(IllegalArgumentException.class, () -> IPv4Address.valueOf("1.2.3"));
     }
     
     @Test
diff --git a/src/test/java/edu/harvard/iq/dataverse/authorization/groups/impl/ipaddress/ip/IPv6AddressTest.java b/src/test/java/edu/harvard/iq/dataverse/authorization/groups/impl/ipaddress/ip/IPv6AddressTest.java
index 2070dc347e7..77618e558ec 100644
--- a/src/test/java/edu/harvard/iq/dataverse/authorization/groups/impl/ipaddress/ip/IPv6AddressTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/authorization/groups/impl/ipaddress/ip/IPv6AddressTest.java
@@ -1,10 +1,11 @@
 package edu.harvard.iq.dataverse.authorization.groups.impl.ipaddress.ip;
 
 import java.util.Arrays;
-import org.junit.After;
-import org.junit.Before;
-import org.junit.Test;
-import static org.junit.Assert.*;
+import org.junit.jupiter.api.AfterEach;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
+
+import static org.junit.jupiter.api.Assertions.*;
 
 /**
  *
@@ -15,11 +16,11 @@ public class IPv6AddressTest {
     public IPv6AddressTest() {
     }
 
-    @Before
+    @BeforeEach
     public void setUp() {
     }
 
-    @After
+    @AfterEach
     public void tearDown() {
     }
 
@@ -37,15 +38,15 @@ public void testValueOfWithExpansion() {
         int[] expected = new int[]{0x2001, 0xdb8, 0x85a3, 0x0, 0, 0x8a2e, 0x370, 0x7334};
         IPv6Address adr = IPv6Address.valueOf("2001:db8:85a3::8a2e:370:7334");
         for (int i = 0; i < 8; i++) {
-            assertEquals("At index " + i + ": expecting " + expected[i] + ", got " + adr.get(i),
-                    expected[i], adr.get(i));
+            assertEquals(expected[i], adr.get(i),
+                "At index " + i + ": expecting " + expected[i] + ", got " + adr.get(i));
         }
 
         expected = new int[]{0x2001, 0xdb8, 0x0, 0x0, 0x0, 0x0, 0x370, 0x7334};
         adr = IPv6Address.valueOf("2001:db8::370:7334");
         for (int i = 0; i < 8; i++) {
-            assertEquals("At index " + i + ": expecting " + expected[i] + ", got " + adr.get(i),
-                    expected[i], adr.get(i));
+            assertEquals(expected[i], adr.get(i),
+                "At index " + i + ": expecting " + expected[i] + ", got " + adr.get(i));
         }
     }
 
@@ -54,16 +55,16 @@ public void testValueOfWithExpansionZerosAtStart() {
         int[] expected = new int[]{0, 0, 0, 0, 0, 0x8a2e, 0x370, 0x7334};
         IPv6Address adr = IPv6Address.valueOf("::8a2e:370:7334");
         for (int i = 0; i < 8; i++) {
-            assertEquals("At index " + i + ": expecting " + expected[i] + ", got " + adr.get(i),
-                    expected[i], adr.get(i));
+            assertEquals(expected[i], adr.get(i),
+                "At index " + i + ": expecting " + expected[i] + ", got " + adr.get(i));
         }
 
         expected = new int[]{0, 0, 0, 0, 0, 0, 0, 0x7334};
         adr = IPv6Address.valueOf("::7334");
         System.out.println("adr = " + adr);
         for (int i = 0; i < 8; i++) {
-            assertEquals("At index " + i + ": expecting " + expected[i] + ", got " + adr.get(i),
-                    expected[i], adr.get(i));
+            assertEquals(expected[i], adr.get(i),
+                "At index " + i + ": expecting " + expected[i] + ", got " + adr.get(i));
         }
     }
 
@@ -72,15 +73,15 @@ public void testValueOfWithExpansionZerosAtEnd() {
         int[] expected = new int[]{0x2001, 0x8a2e, 0, 0, 0, 0, 0, 0};
         IPv6Address adr = IPv6Address.valueOf("2001:8a2e::");
         for (int i = 0; i < 8; i++) {
-            assertEquals("At index " + i + ": expecting " + expected[i] + ", got " + adr.get(i),
-                    expected[i], adr.get(i));
+            assertEquals(expected[i], adr.get(i),
+                "At index " + i + ": expecting " + expected[i] + ", got " + adr.get(i));
         }
 
         expected = new int[]{0x1337, 0, 0, 0, 0, 0, 0, 0};
         adr = IPv6Address.valueOf("1337::");
         for (int i = 0; i < 8; i++) {
-            assertEquals("At index " + i + ": expecting " + expected[i] + ", got " + adr.get(i),
-                    expected[i], adr.get(i));
+            assertEquals(expected[i], adr.get(i),
+                "At index " + i + ": expecting " + expected[i] + ", got " + adr.get(i));
         }
     }
 
@@ -90,15 +91,15 @@ public void testValueOfWithExpansionSpecialCases() {
         IPv6Address adr = IPv6Address.valueOf("::");
         System.out.println("adr = " + adr);
         for (int i = 0; i < 8; i++) {
-            assertEquals("At index " + i + ": expecting " + expected[i] + ", got " + adr.get(i),
-                    expected[i], adr.get(i));
+            assertEquals(expected[i], adr.get(i),
+                "At index " + i + ": expecting " + expected[i] + ", got " + adr.get(i));
         }
 
         expected = new int[]{0, 0, 0, 0, 0, 0, 0, 1};
         adr = IPv6Address.valueOf("::1");
         for (int i = 0; i < 8; i++) {
-            assertEquals("At index " + i + ": expecting " + expected[i] + ", got " + adr.get(i),
-                    expected[i], adr.get(i));
+            assertEquals(expected[i], adr.get(i),
+                "At index " + i + ": expecting " + expected[i] + ", got " + adr.get(i));
         }
     }
 
@@ -108,24 +109,24 @@ public void testLocalhostness() {
         assertFalse(IPv6Address.valueOf("fff::1").isLocalhost());
     }
 
-    @Test(expected = IllegalArgumentException.class)
-    public void testIllegalLength() {
-        IPv6Address.valueOf("0:1:2:3");
+    @Test
+    void testIllegalLength() {
+        assertThrows(IllegalArgumentException.class, () -> IPv6Address.valueOf("0:1:2:3"));
     }
 
-    @Test(expected = IllegalArgumentException.class)
-    public void testIllegalLengthPrefix() {
-        IPv6Address.valueOf(":1:2:3");
+    @Test
+    void testIllegalLengthPrefix() {
+        assertThrows(IllegalArgumentException.class, () -> IPv6Address.valueOf(":1:2:3"));
     }
 
-    @Test(expected = IllegalArgumentException.class)
-    public void testIllegalLengthSuffix() {
-        IPv6Address.valueOf("1:2:3:");
+    @Test
+    void testIllegalLengthSuffix() {
+        assertThrows(IllegalArgumentException.class, () -> IPv6Address.valueOf("1:2:3:"));
     }
 
-    @Test(expected = IllegalArgumentException.class)
-    public void testIllegalNumber() {
-        IPv6Address.valueOf("::xxx");
+    @Test
+    void testIllegalNumber() {
+        assertThrows(IllegalArgumentException.class, () -> IPv6Address.valueOf("::xxx"));
     }
 
     @Test
@@ -150,8 +151,7 @@ public void testLongRoundTrips() {
                 "fe80::8358:c945:7094:2e6c",
                 "fe80::60d0:6eff:fece:7713", "ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff")) {
             IPv6Address addr = IPv6Address.valueOf(s);
-            assertEquals("Bad roundtrip on address: " + s,
-                    addr, new IPv6Address(addr.toLongArray()));
+            assertEquals(addr, new IPv6Address(addr.toLongArray()), "Bad roundtrip on address: " + s);
         }
     }
 
@@ -165,11 +165,9 @@ public void testInclusionAbove() {
                 "dd:a:a:a:a:a:b:a", "dd:a:a:a:a:a:a:b")) {
             IPv6Address ipv6 = IPv6Address.valueOf(addr);
             assertFalse(r.contains(ipv6));
-            assertTrue("for address " + ipv6, above(ipv6.toLongArray(),
-                    r.getTop().toLongArray()));
-            assertFalse("for address " + ipv6, between(r.getBottom().toLongArray(),
-                    r.getTop().toLongArray(),
-                    ipv6.toLongArray()));
+            assertTrue(above(ipv6.toLongArray(), r.getTop().toLongArray()), "for address " + ipv6);
+            assertFalse(between(r.getBottom().toLongArray(), r.getTop().toLongArray(), ipv6.toLongArray()),
+                "for address " + ipv6);
 
         }
     }
@@ -188,9 +186,8 @@ public void testInclusionBelow() {
             long[] bottomArr = r.getBottom().toLongArray();
             long[] addrArr = ipv6.toLongArray();
 
-            assertTrue("for address " + ipv6, above(bottomArr, addrArr));
-            assertFalse("for address " + ipv6, between(bottomArr,
-                    r.getTop().toLongArray(), addrArr));
+            assertTrue(above(bottomArr, addrArr), "for address " + ipv6);
+            assertFalse(between(bottomArr, r.getTop().toLongArray(), addrArr), "for address " + ipv6);
 
         }
     }
diff --git a/src/test/java/edu/harvard/iq/dataverse/authorization/groups/impl/ipaddress/ip/IpAddressRangeTest.java b/src/test/java/edu/harvard/iq/dataverse/authorization/groups/impl/ipaddress/ip/IpAddressRangeTest.java
index f232b713640..e3134dedaef 100644
--- a/src/test/java/edu/harvard/iq/dataverse/authorization/groups/impl/ipaddress/ip/IpAddressRangeTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/authorization/groups/impl/ipaddress/ip/IpAddressRangeTest.java
@@ -1,7 +1,7 @@
 package edu.harvard.iq.dataverse.authorization.groups.impl.ipaddress.ip;
 
-import org.junit.Test;
-import static org.junit.Assert.*;
+import org.junit.jupiter.api.Test;
+import static org.junit.jupiter.api.Assertions.*;
 
 /**
  *
@@ -83,7 +83,7 @@ public void testSingleAddress() {
     
     public void testRange( Boolean expected, IpAddressRange range, IpAddress... addresses ) {
         for ( IpAddress ipa : addresses ) {
-            assertEquals( "Testing " + ipa + " in " + range, expected, range.contains(ipa));
+            assertEquals(expected, range.contains(ipa), "Testing " + ipa + " in " + range);
         }
     }
     
diff --git a/src/test/java/edu/harvard/iq/dataverse/authorization/groups/impl/ipaddress/ip/IpAddressTest.java b/src/test/java/edu/harvard/iq/dataverse/authorization/groups/impl/ipaddress/ip/IpAddressTest.java
index ce6ff29f1c1..e757472e316 100644
--- a/src/test/java/edu/harvard/iq/dataverse/authorization/groups/impl/ipaddress/ip/IpAddressTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/authorization/groups/impl/ipaddress/ip/IpAddressTest.java
@@ -1,8 +1,9 @@
 package edu.harvard.iq.dataverse.authorization.groups.impl.ipaddress.ip;
 
 import java.util.Arrays;
-import org.junit.Test;
-import static org.junit.Assert.*;
+import org.junit.jupiter.api.Test;
+
+import static org.junit.jupiter.api.Assertions.*;
 
 /**
  *
diff --git a/src/test/java/edu/harvard/iq/dataverse/authorization/groups/impl/maildomain/MailDomainGroupServiceBeanTest.java b/src/test/java/edu/harvard/iq/dataverse/authorization/groups/impl/maildomain/MailDomainGroupServiceBeanTest.java
index c260252f131..875cd02cc4c 100644
--- a/src/test/java/edu/harvard/iq/dataverse/authorization/groups/impl/maildomain/MailDomainGroupServiceBeanTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/authorization/groups/impl/maildomain/MailDomainGroupServiceBeanTest.java
@@ -8,12 +8,11 @@
 import org.junit.jupiter.params.ParameterizedTest;
 import org.junit.jupiter.params.provider.Arguments;
 import org.junit.jupiter.params.provider.MethodSource;
-import org.mockito.InjectMocks;
 import org.mockito.Mock;
 import org.mockito.junit.jupiter.MockitoExtension;
 
-import javax.persistence.EntityManager;
-import javax.persistence.TypedQuery;
+import jakarta.persistence.EntityManager;
+import jakarta.persistence.TypedQuery;
 import java.util.*;
 import java.util.stream.Stream;
 
diff --git a/src/test/java/edu/harvard/iq/dataverse/authorization/providers/builtin/BuiltinAuthenticationProviderTest.java b/src/test/java/edu/harvard/iq/dataverse/authorization/providers/builtin/BuiltinAuthenticationProviderTest.java
index ebf22f9dcb4..ff51260d43e 100644
--- a/src/test/java/edu/harvard/iq/dataverse/authorization/providers/builtin/BuiltinAuthenticationProviderTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/authorization/providers/builtin/BuiltinAuthenticationProviderTest.java
@@ -7,9 +7,9 @@
 import edu.harvard.iq.dataverse.mocks.MockBuiltinUserServiceBean;
 import edu.harvard.iq.dataverse.mocks.MockPasswordValidatorServiceBean;
 import edu.harvard.iq.dataverse.validation.PasswordValidatorServiceBean;
-import org.junit.Test;
-import static org.junit.Assert.*;
-import org.junit.Before;
+import org.junit.jupiter.api.Test;
+import static org.junit.jupiter.api.Assertions.*;
+import org.junit.jupiter.api.BeforeEach;
 
 /**
  *
@@ -22,7 +22,7 @@ public class BuiltinAuthenticationProviderTest {
     MockBuiltinUserServiceBean bean = null;
     AuthenticationServiceBean authBean = null;
     
-    @Before
+    @BeforeEach
     public void setup() {
         bean = new MockBuiltinUserServiceBean();
         passwordValidatorService = new MockPasswordValidatorServiceBean();
diff --git a/src/test/java/edu/harvard/iq/dataverse/authorization/providers/builtin/DataverseUserPageTest.java b/src/test/java/edu/harvard/iq/dataverse/authorization/providers/builtin/DataverseUserPageTest.java
index defbc4416b5..c81edd6d102 100644
--- a/src/test/java/edu/harvard/iq/dataverse/authorization/providers/builtin/DataverseUserPageTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/authorization/providers/builtin/DataverseUserPageTest.java
@@ -1,6 +1,6 @@
 package edu.harvard.iq.dataverse.authorization.providers.builtin;
 
-import static org.junit.Assert.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertEquals;
 import static org.mockito.Mockito.mock;
 
 import java.sql.Timestamp;
diff --git a/src/test/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/OAuth2AuthenticationProviderFactoryTest.java b/src/test/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/OAuth2AuthenticationProviderFactoryTest.java
index 5838fdee42c..ae73c505a4b 100644
--- a/src/test/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/OAuth2AuthenticationProviderFactoryTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/OAuth2AuthenticationProviderFactoryTest.java
@@ -2,8 +2,8 @@
 
 import java.util.HashMap;
 import java.util.Map;
-import static org.junit.Assert.assertEquals;
-import org.junit.Test;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import org.junit.jupiter.api.Test;
 
 /**
  *
diff --git a/src/test/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/OAuth2LoginBackingBeanTest.java b/src/test/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/OAuth2LoginBackingBeanTest.java
index 80249cc89e8..f133670e7ab 100644
--- a/src/test/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/OAuth2LoginBackingBeanTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/OAuth2LoginBackingBeanTest.java
@@ -19,10 +19,10 @@
 import org.mockito.junit.jupiter.MockitoExtension;
 import org.omnifaces.util.Faces;
 
-import javax.faces.context.ExternalContext;
-import javax.faces.context.FacesContext;
-import javax.faces.context.Flash;
-import javax.servlet.http.HttpServletRequest;
+import jakarta.faces.context.ExternalContext;
+import jakarta.faces.context.FacesContext;
+import jakarta.faces.context.Flash;
+import jakarta.servlet.http.HttpServletRequest;
 
 import static org.junit.jupiter.api.Assertions.assertDoesNotThrow;
 
diff --git a/src/test/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/impl/GitHubOAuth2APTest.java b/src/test/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/impl/GitHubOAuth2APTest.java
index 786c30fb2d7..ed6b9789848 100644
--- a/src/test/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/impl/GitHubOAuth2APTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/impl/GitHubOAuth2APTest.java
@@ -2,10 +2,10 @@
 
 import edu.harvard.iq.dataverse.authorization.AuthenticatedUserDisplayInfo;
 import edu.harvard.iq.dataverse.authorization.providers.oauth2.AbstractOAuth2AuthenticationProvider;
-import static org.junit.Assert.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertEquals;
 
 import edu.harvard.iq.dataverse.authorization.providers.oauth2.OAuth2UserRecord;
-import org.junit.Test;
+import org.junit.jupiter.api.Test;
 
 public class GitHubOAuth2APTest extends GitHubOAuth2AP {
 
diff --git a/src/test/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/impl/GoogleOAuth2APTest.java b/src/test/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/impl/GoogleOAuth2APTest.java
index 5cb2788c3ee..cfba755d2a1 100644
--- a/src/test/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/impl/GoogleOAuth2APTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/impl/GoogleOAuth2APTest.java
@@ -2,9 +2,9 @@
 
 import edu.harvard.iq.dataverse.authorization.AuthenticatedUserDisplayInfo;
 import edu.harvard.iq.dataverse.authorization.providers.oauth2.AbstractOAuth2AuthenticationProvider;
-import javax.json.Json;
-import static org.junit.Assert.assertEquals;
-import org.junit.Test;
+import jakarta.json.Json;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import org.junit.jupiter.api.Test;
 
 public class GoogleOAuth2APTest extends GoogleOAuth2AP {
 
diff --git a/src/test/java/edu/harvard/iq/dataverse/authorization/providers/shib/ShibUtilTest.java b/src/test/java/edu/harvard/iq/dataverse/authorization/providers/shib/ShibUtilTest.java
index 58d32b7d2a1..c644a4e2b2a 100644
--- a/src/test/java/edu/harvard/iq/dataverse/authorization/providers/shib/ShibUtilTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/authorization/providers/shib/ShibUtilTest.java
@@ -3,74 +3,46 @@
 import java.io.IOException;
 import java.nio.file.Files;
 import java.nio.file.Paths;
-import java.util.Arrays;
-import java.util.Collection;
 import java.util.Map;
 import java.util.UUID;
 import java.util.logging.Level;
 import java.util.logging.Logger;
-import javax.servlet.http.HttpServletRequest;
-import static org.junit.Assert.*;
-import org.junit.Test;
-import org.junit.experimental.runners.Enclosed;
-import org.junit.runner.RunWith;
-import org.junit.runners.Parameterized;
-import org.junit.runners.Parameterized.Parameter;
-import org.junit.runners.Parameterized.Parameters;
+import jakarta.servlet.http.HttpServletRequest;
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.params.ParameterizedTest;
+import org.junit.jupiter.params.provider.CsvSource;
 
+import static org.junit.jupiter.api.Assertions.assertEquals;
 import static org.mockito.Mockito.mock;
 
-@RunWith(Enclosed.class)
 public class ShibUtilTest {
-
-    @RunWith(Parameterized.class)
-    public static class ShibUtilParamTest {
-
-        @Parameters
-        public static Collection<String[]> data() {
-            return Arrays.asList(new String[][] {
-                { "John", "Harvard", "John", "Harvard", null },
-                { "Guido", "van Rossum", "Guido", "van Rossum", null },
-                { "Philip Seymour", "Hoffman", "Philip Seymour", "Hoffman", "Philip Seymour Hoffman" },
-                { "Edward", "Cummings", "Edward;e e", "Cummings", null },
-                { "Edward", "Cummings", "Edward;e e", "Cummings", "e e cummings" },
-                { "Anthony", "Stark", "Tony;Anthony", "Stark", null },
-                { "Anthony", "Stark", "Anthony;Tony", "Stark", null },
-                { "Antoni", "Gaudí", "Antoni", "Gaudí i Cornet;Gaudí", null },
-                { "Jane", "Doe", null, null, "Jane Doe" },
-                /**
-                * @todo Make findBestFirstAndLastName smart enough to know that the last name
-                *       should be "Hoffman" rather than "Seymour".
-                */
-                { "Philip", "Seymour", null, null, "Philip Seymour Hoffman" },
-                { null, null, null, null, "" }
-            });
-        }
-
-        @Parameter
-        public String expectedFirstName;
-
-        @Parameter(1)
-        public String expectedLastName;
-
-        @Parameter(2)
-        public String actualFirstName;
-
-        @Parameter(3)
-        public String actualLastName;
-
-        @Parameter(4)
-        public String actualDisplayName;
-
-        @Test
-        public void testFindBestFirstAndLastName() {
-
-            // ShibUserNameFields expected1 = new ShibUserNameFields("John", "Harvard");
-            ShibUserNameFields actualValues = ShibUtil.findBestFirstAndLastName(actualFirstName, actualLastName, actualDisplayName);
-            assertEquals(expectedFirstName, actualValues.getFirstName());
-            assertEquals(expectedLastName, actualValues.getLastName());
-        }
+    
+    @ParameterizedTest
+    @CsvSource(value = {
+        "John,Harvard,John,Harvard,NULL",
+        "Guido,van Rossum,Guido,van Rossum,NULL",
+        "Philip Seymour,Hoffman,Philip Seymour,Hoffman,Philip Seymour Hoffman",
+        "Edward,Cummings,Edward;e e,Cummings,NULL",
+        "Edward,Cummings,Edward;e e,Cummings,e e cummings",
+        "Anthony,Stark,Tony;Anthony,Stark,NULL",
+        "Anthony,Stark,Anthony;Tony,Stark,NULL",
+        "Antoni,Gaudí,Antoni,Gaudí i Cornet;Gaudí,NULL",
+        "Jane,Doe,NULL,NULL,Jane Doe",
+        /**
+         * @todo Make findBestFirstAndLastName smart enough to know that the last name
+         *       should be "Hoffman" rather than "Seymour".
+         */
+        "Philip,Seymour,NULL,NULL,Philip Seymour Hoffman",
+        "NULL,NULL,NULL,NULL,EMPTY"
+    }, nullValues = "NULL", emptyValue = "EMPTY")
+    void testFindBestFirstAndLastName(String expectedFirstName, String expectedLastName, String actualFirstName,
+                                      String actualLastName, String actualDisplayName) {
+        // ShibUserNameFields expected1 = new ShibUserNameFields("John", "Harvard");
+        ShibUserNameFields actualValues = ShibUtil.findBestFirstAndLastName(actualFirstName, actualLastName, actualDisplayName);
+        assertEquals(expectedFirstName, actualValues.getFirstName());
+        assertEquals(expectedLastName, actualValues.getLastName());
     }
+    
 
     public static class ShibUtilNoParamTest {
 
diff --git a/src/test/java/edu/harvard/iq/dataverse/authorization/users/AuthenticatedUserTest.java b/src/test/java/edu/harvard/iq/dataverse/authorization/users/AuthenticatedUserTest.java
index 5606bbe6aa3..7bd802b3b02 100644
--- a/src/test/java/edu/harvard/iq/dataverse/authorization/users/AuthenticatedUserTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/authorization/users/AuthenticatedUserTest.java
@@ -18,23 +18,24 @@
 import java.util.HashSet;
 import java.util.Set;
 
-import org.junit.Test;
-import static org.junit.Assert.*;
-import org.junit.Before;
+import org.junit.jupiter.api.Test;
+import static org.junit.jupiter.api.Assertions.*;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.extension.ExtendWith;
+
+import jakarta.json.JsonObject;
+import jakarta.json.JsonString;
 
-import javax.json.JsonObject;
-import javax.json.JsonString;
-import org.junit.runner.RunWith;
 import org.mockito.InjectMocks;
 import org.mockito.Mock;
-import org.mockito.junit.MockitoJUnitRunner;
+import org.mockito.junit.jupiter.MockitoExtension;
 
 /**
  * Tested class: AuthenticatedUser.java
  *
  * @author bsilverstein
  */
-@RunWith(MockitoJUnitRunner.class)
+@ExtendWith(MockitoExtension.class)
 public class AuthenticatedUserTest {
 
     @Mock
@@ -51,7 +52,7 @@ public AuthenticatedUserTest() {
     public static final String IDENTIFIER_PREFIX = "@";
     public static final Set<Type> mutedTypes = EnumSet.of(Type.ASSIGNROLE, Type.REVOKEROLE);
 
-    @Before
+    @BeforeEach
     public void setUp() {
         testUser = MocksFactory.makeAuthenticatedUser("Homer", "Simpson");
         expResult = testUser.getCreatedTime();
@@ -363,14 +364,14 @@ public void testMutingInJson() {
         JsonObject jObject = testUser.toJson().build();
 
         Set<String> mutedEmails = new HashSet<>(jObject.getJsonArray("mutedEmails").getValuesAs(JsonString::getString));
-        assertTrue("Set contains two elements", mutedEmails.size() == 2);
-        assertTrue("Set contains REVOKEROLE", mutedEmails.contains("REVOKEROLE"));
-        assertTrue("Set contains ASSIGNROLE", mutedEmails.contains("ASSIGNROLE"));
+        assertTrue(mutedEmails.size() == 2, "Set contains two elements");
+        assertTrue(mutedEmails.contains("REVOKEROLE"), "Set contains REVOKEROLE");
+        assertTrue(mutedEmails.contains("ASSIGNROLE"), "Set contains ASSIGNROLE");
 
         Set<String> mutedNotifications = new HashSet<>(jObject.getJsonArray("mutedNotifications").getValuesAs(JsonString::getString));
-        assertTrue("Set contains two elements", mutedNotifications.size() == 2);
-        assertTrue("Set contains REVOKEROLE", mutedNotifications.contains("REVOKEROLE"));
-        assertTrue("Set contains ASSIGNROLE", mutedNotifications.contains("ASSIGNROLE"));
+        assertTrue(mutedNotifications.size() == 2, "Set contains two elements");
+        assertTrue(mutedNotifications.contains("REVOKEROLE"), "Set contains REVOKEROLE");
+        assertTrue(mutedNotifications.contains("ASSIGNROLE"), "Set contains ASSIGNROLE");
     }
 
     @Test
@@ -400,10 +401,10 @@ public void testTypeTokenizer() {
                 Type.tokenizeToSet(" ASSIGNROLE , CREATEDV,REVOKEROLE  ")
             )
         );
-        assertTrue("typeSet contains 3 elements", typeSet.size() == 3);
-        assertTrue("typeSet contains ASSIGNROLE", typeSet.contains(Type.ASSIGNROLE));
-        assertTrue("typeSet contains CREATEDV", typeSet.contains(Type.CREATEDV));
-        assertTrue("typeSet contains REVOKEROLE", typeSet.contains(Type.REVOKEROLE));
+        assertTrue(typeSet.size() == 3, "typeSet contains 3 elements");
+        assertTrue(typeSet.contains(Type.ASSIGNROLE), "typeSet contains ASSIGNROLE");
+        assertTrue(typeSet.contains(Type.CREATEDV), "typeSet contains CREATEDV");
+        assertTrue(typeSet.contains(Type.REVOKEROLE), "typeSet contains REVOKEROLE");
     }
 
     @Test
diff --git a/src/test/java/edu/harvard/iq/dataverse/branding/BrandingUtilTest.java b/src/test/java/edu/harvard/iq/dataverse/branding/BrandingUtilTest.java
index 95deafc0cfe..2b526b8a449 100644
--- a/src/test/java/edu/harvard/iq/dataverse/branding/BrandingUtilTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/branding/BrandingUtilTest.java
@@ -7,8 +7,8 @@
 import java.util.Arrays;
 import java.util.logging.Logger;
 import java.util.stream.Stream;
-import javax.mail.internet.AddressException;
-import javax.mail.internet.InternetAddress;
+import jakarta.mail.internet.AddressException;
+import jakarta.mail.internet.InternetAddress;
 import static org.junit.jupiter.api.Assertions.assertEquals;
 
 import org.junit.jupiter.api.AfterAll;
diff --git a/src/test/java/edu/harvard/iq/dataverse/confirmemail/ConfirmEmailDataTest.java b/src/test/java/edu/harvard/iq/dataverse/confirmemail/ConfirmEmailDataTest.java
index 45c4162188a..10de20239e5 100644
--- a/src/test/java/edu/harvard/iq/dataverse/confirmemail/ConfirmEmailDataTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/confirmemail/ConfirmEmailDataTest.java
@@ -1,11 +1,11 @@
 package edu.harvard.iq.dataverse.confirmemail;
 
-import static org.junit.Assert.assertFalse;
-import static org.junit.Assert.assertTrue;
+import static org.junit.jupiter.api.Assertions.assertFalse;
+import static org.junit.jupiter.api.Assertions.assertTrue;
 
-import org.junit.After;
-import org.junit.Before;
-import org.junit.Test;
+import org.junit.jupiter.api.AfterEach;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
 
 import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser;
 
@@ -14,13 +14,13 @@ public class ConfirmEmailDataTest {
     private ConfirmEmailData instance;
     private AuthenticatedUser user;
 
-    @Before
+    @BeforeEach
     public void setUp() {
         this.user = new AuthenticatedUser();
         this.instance = new ConfirmEmailData(user, 60);
     }
 
-    @After
+    @AfterEach
     public void tearDown() {
         this.instance = null;
         this.user = null;
diff --git a/src/test/java/edu/harvard/iq/dataverse/confirmemail/ConfirmEmailUtilTest.java b/src/test/java/edu/harvard/iq/dataverse/confirmemail/ConfirmEmailUtilTest.java
index 8fdc7dc38d5..43795f0114f 100644
--- a/src/test/java/edu/harvard/iq/dataverse/confirmemail/ConfirmEmailUtilTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/confirmemail/ConfirmEmailUtilTest.java
@@ -1,61 +1,41 @@
 package edu.harvard.iq.dataverse.confirmemail;
 
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.params.ParameterizedTest;
+import org.junit.jupiter.params.provider.Arguments;
+import org.junit.jupiter.params.provider.MethodSource;
+
 import java.sql.Timestamp;
-import java.util.Collection;
-import java.util.Arrays;
+import java.util.stream.Stream;
 
-import static org.junit.Assert.assertEquals;
-import org.junit.Test;
-import org.junit.runner.RunWith;
-import org.junit.experimental.runners.Enclosed;
-import org.junit.runners.Parameterized;
-import org.junit.runners.Parameterized.Parameters;
+import static org.junit.jupiter.api.Assertions.assertEquals;
 
-@RunWith(Enclosed.class)
 public class ConfirmEmailUtilTest {
-
-    @RunWith(Parameterized.class)
-    public static class ConfirmEmailUtilParamTest {
-
-        public String timeAsFriendlyString;
-        public int timeInMinutes;
-
-        public ConfirmEmailUtilParamTest(String timeAsFriendlyString, int timeInSeconds) {
-            this.timeAsFriendlyString = timeAsFriendlyString;
-            this.timeInMinutes = timeInSeconds;
-        }
-
-        @Parameters
-        public static Collection<Object[]> parameters() {
-            return Arrays.asList(
-                    new Object[][] { 
-                        { "48 hours", 2880 }, 
-                        { "24 hours", 1440 },
-                        { "2.75 hours", 165 },
-                        { "2.5 hours", 150 },
-                        { "1.5 hours", 90 }, 
-                        { "1 hour", 60 }, 
-                        { "30 minutes", 30 }, 
-                        { "1 minute", 1 } 
-                    }
-            );
-        }
-
-        @Test
-        public void friendlyExpirationTimeTest() {
-            assertEquals(timeAsFriendlyString, ConfirmEmailUtil.friendlyExpirationTime(timeInMinutes));
-        }
+    
+    static Stream<Arguments> dataPoints() {
+        return Stream.of(
+            Arguments.of("48 hours", 2880),
+            Arguments.of("24 hours", 1440),
+            Arguments.of("2.75 hours", 165),
+            Arguments.of("2.5 hours", 150),
+            Arguments.of("1.5 hours", 90),
+            Arguments.of("1 hour", 60),
+            Arguments.of("30 minutes", 30),
+            Arguments.of("1 minute", 1)
+        );
     }
-
-    public static class ConfirmEmailUtilNoParamTest {
-
-        @Test
-        public void testGrandfatheredTime() {
-            System.out.println();
-            System.out.println("Grandfathered account timestamp test");
-            System.out.println("Grandfathered Time (y2k): " + ConfirmEmailUtil.getGrandfatheredTime());
-            assertEquals(Timestamp.valueOf("2000-01-01 00:00:00.0"), ConfirmEmailUtil.getGrandfatheredTime());
-            System.out.println();
-        }
+    
+    @ParameterizedTest
+    @MethodSource("dataPoints")
+    void friendlyExpirationTimeTest(String timeAsFriendlyString, int timeInMinutes) {
+        assertEquals(timeAsFriendlyString, ConfirmEmailUtil.friendlyExpirationTime(timeInMinutes));
+    }
+    
+    @Test
+    void testGrandfatheredTime() {
+        //System.out.println("Grandfathered account timestamp test");
+        //System.out.println("Grandfathered Time (y2k): " + ConfirmEmailUtil.getGrandfatheredTime());
+        assertEquals(Timestamp.valueOf("2000-01-01 00:00:00.0"), ConfirmEmailUtil.getGrandfatheredTime());
+        //System.out.println();
     }
 }
diff --git a/src/test/java/edu/harvard/iq/dataverse/dataaccess/FileAccessIOTest.java b/src/test/java/edu/harvard/iq/dataverse/dataaccess/FileAccessIOTest.java
index 95621dd8750..552d76b74e8 100644
--- a/src/test/java/edu/harvard/iq/dataverse/dataaccess/FileAccessIOTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/dataaccess/FileAccessIOTest.java
@@ -23,13 +23,13 @@
 import java.util.ArrayList;
 import java.util.List;
 import org.apache.commons.io.FileUtils;
-import org.junit.After;
-import org.junit.Test;
-import static org.junit.Assert.*;
+import org.junit.jupiter.api.AfterEach;
+import org.junit.jupiter.api.Test;
+import static org.junit.jupiter.api.Assertions.*;
 import static org.junit.jupiter.api.Assertions.assertFalse;
 import static org.junit.jupiter.api.Assertions.assertTrue;
 
-import org.junit.Before;
+import org.junit.jupiter.api.BeforeEach;
 
 /**
  *
@@ -51,7 +51,7 @@ public class FileAccessIOTest {
     public FileAccessIOTest() {
     }
 
-    @Before
+    @BeforeEach
     public void setUpClass() throws IOException {
         dataverse = MocksFactory.makeDataverse();
         dataset = MocksFactory.makeDataset();
@@ -77,7 +77,7 @@ public void setUpClass() throws IOException {
         }
     }
 
-    @After
+    @AfterEach
     public void tearDownClass() throws IOException {
         FileUtils.deleteDirectory(new File("/tmp/files/"));
     }
diff --git a/src/test/java/edu/harvard/iq/dataverse/dataaccess/StorageIOTest.java b/src/test/java/edu/harvard/iq/dataverse/dataaccess/StorageIOTest.java
index 83cb0c72786..2ed9d18036d 100644
--- a/src/test/java/edu/harvard/iq/dataverse/dataaccess/StorageIOTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/dataaccess/StorageIOTest.java
@@ -21,8 +21,9 @@
 import java.util.List;
 //import org.apache.commons.httpclient.Header;
 //import org.apache.commons.httpclient.methods.GetMethod;
-import org.junit.Test;
-import static org.junit.Assert.*;
+import org.junit.jupiter.api.Test;
+
+import static org.junit.jupiter.api.Assertions.*;
 
 /**
  *
diff --git a/src/test/java/edu/harvard/iq/dataverse/dataaccess/SwiftAccessIOTest.java b/src/test/java/edu/harvard/iq/dataverse/dataaccess/SwiftAccessIOTest.java
index c1aa6b5fca3..942e4329384 100644
--- a/src/test/java/edu/harvard/iq/dataverse/dataaccess/SwiftAccessIOTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/dataaccess/SwiftAccessIOTest.java
@@ -12,11 +12,10 @@
 import java.security.InvalidKeyException;
 import java.security.NoSuchAlgorithmException;
 import java.security.SignatureException;
-import javax.crypto.Mac;
-import javax.crypto.spec.SecretKeySpec;
-import org.junit.Test;
-import static org.junit.Assert.*;
-import org.junit.Before;
+
+import org.junit.jupiter.api.Test;
+import static org.junit.jupiter.api.Assertions.*;
+import org.junit.jupiter.api.BeforeEach;
 
 /**
  *
@@ -34,7 +33,7 @@ public class SwiftAccessIOTest {
     public SwiftAccessIOTest() {
     }
 
-    @Before
+    @BeforeEach
     public void setUpClass() throws IOException {
         datafile = MocksFactory.makeDataFile();
         dataset = MocksFactory.makeDataset();
diff --git a/src/test/java/edu/harvard/iq/dataverse/datacapturemodule/DataCaptureModuleServiceBeanIT.java b/src/test/java/edu/harvard/iq/dataverse/datacapturemodule/DataCaptureModuleServiceBeanIT.java
index c44495be0c2..a37f8b724fe 100644
--- a/src/test/java/edu/harvard/iq/dataverse/datacapturemodule/DataCaptureModuleServiceBeanIT.java
+++ b/src/test/java/edu/harvard/iq/dataverse/datacapturemodule/DataCaptureModuleServiceBeanIT.java
@@ -11,15 +11,15 @@
 import java.util.Calendar;
 import java.util.TimeZone;
 import java.util.logging.Logger;
-import org.junit.Test;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertNotNull;
-import static org.junit.Assert.assertTrue;
-import javax.json.JsonObject;
+import org.junit.jupiter.api.Test;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertNotNull;
+import static org.junit.jupiter.api.Assertions.assertTrue;
+import jakarta.json.JsonObject;
 import static java.lang.Thread.sleep;
-import javax.json.Json;
-import javax.json.JsonObjectBuilder;
-import javax.json.JsonReader;
+import jakarta.json.Json;
+import jakarta.json.JsonObjectBuilder;
+import jakarta.json.JsonReader;
 
 /**
  * These tests are not expected to pass unless you have a Data Capture Module
diff --git a/src/test/java/edu/harvard/iq/dataverse/datacapturemodule/DataCaptureModuleUtilTest.java b/src/test/java/edu/harvard/iq/dataverse/datacapturemodule/DataCaptureModuleUtilTest.java
index a00daef63c2..eb19f22df63 100644
--- a/src/test/java/edu/harvard/iq/dataverse/datacapturemodule/DataCaptureModuleUtilTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/datacapturemodule/DataCaptureModuleUtilTest.java
@@ -7,18 +7,18 @@
 import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser;
 import static edu.harvard.iq.dataverse.mocks.MocksFactory.makeAuthenticatedUser;
 import java.io.UnsupportedEncodingException;
-import javax.json.Json;
-import javax.json.JsonObject;
-import javax.json.JsonObjectBuilder;
+import jakarta.json.Json;
+import jakarta.json.JsonObject;
+import jakarta.json.JsonObjectBuilder;
 import org.apache.http.HttpResponseFactory;
 import org.apache.http.HttpStatus;
 import org.apache.http.HttpVersion;
 import org.apache.http.entity.StringEntity;
 import org.apache.http.impl.DefaultHttpResponseFactory;
 import org.apache.http.message.BasicStatusLine;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertTrue;
-import org.junit.Test;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertTrue;
+import org.junit.jupiter.api.Test;
 
 public class DataCaptureModuleUtilTest {
 
diff --git a/src/test/java/edu/harvard/iq/dataverse/dataset/DatasetUtilTest.java b/src/test/java/edu/harvard/iq/dataverse/dataset/DatasetUtilTest.java
index 46bce999c60..8eed2a33c5a 100644
--- a/src/test/java/edu/harvard/iq/dataverse/dataset/DatasetUtilTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/dataset/DatasetUtilTest.java
@@ -11,8 +11,9 @@
 import edu.harvard.iq.dataverse.mocks.MocksFactory;
 import java.util.ArrayList;
 import java.util.List;
-import org.junit.Test;
-import static org.junit.Assert.*;
+import org.junit.jupiter.api.Test;
+
+import static org.junit.jupiter.api.Assertions.*;
 
 public class DatasetUtilTest {
 
diff --git a/src/test/java/edu/harvard/iq/dataverse/datasetutility/DuplicateFileCheckerTest.java b/src/test/java/edu/harvard/iq/dataverse/datasetutility/DuplicateFileCheckerTest.java
index f7fe81b16e3..ced15594f85 100644
--- a/src/test/java/edu/harvard/iq/dataverse/datasetutility/DuplicateFileCheckerTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/datasetutility/DuplicateFileCheckerTest.java
@@ -1,12 +1,12 @@
 package edu.harvard.iq.dataverse.datasetutility;
 
-import static org.junit.Assert.assertFalse;
-import static org.junit.Assert.assertNotNull;
+import static org.junit.jupiter.api.Assertions.*;
+
 import static org.mockito.Mockito.mock;
 
-import org.junit.After;
-import org.junit.Before;
-import org.junit.Test;
+import org.junit.jupiter.api.AfterEach;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
 import org.mockito.Mockito;
 
 import edu.harvard.iq.dataverse.DataFile;
@@ -19,13 +19,13 @@ public class DuplicateFileCheckerTest {
     private DuplicateFileChecker duplicateFileChecker;
     private DatasetVersionServiceBean datasetVersionServiceBean;
 
-    @Before
+    @BeforeEach
     public void setUp() {
         this.datasetVersionServiceBean = mock(DatasetVersionServiceBean.class);
         this.duplicateFileChecker = new DuplicateFileChecker(datasetVersionServiceBean);
     }
 
-    @After
+    @AfterEach
     public void tearDown() {
         duplicateFileChecker = null;
     }
@@ -34,9 +34,9 @@ public void tearDown() {
     // test constructor
     // ----------------------------------------------------------------------------------------------------------
 
-    @Test(expected = NullPointerException.class)
-    public void testConstructorWithUndefinedDatasetVersionService() {
-        DuplicateFileChecker duplicateFileChecker = new DuplicateFileChecker(null);
+    @Test
+    void testConstructorWithUndefinedDatasetVersionService() {
+        assertThrows(NullPointerException.class, () -> new DuplicateFileChecker(null));
     }
 
     @Test
@@ -49,20 +49,20 @@ public void testConstructorWithDefinedDatasetVersionService() {
     // test public boolean isFileInSavedDatasetVersion(DatasetVersion datasetVersion, String checkSum)
     // ----------------------------------------------------------------------------------------------------------
 
-    @Test(expected = NullPointerException.class)
-    public void testIsFileInSavedDatasetVersionWithCheckSumParamWithUndefinedDatasetVersion() {
+    @Test
+    void testIsFileInSavedDatasetVersionWithCheckSumParamWithUndefinedDatasetVersion() {
         DatasetVersion datasetVersion = null;
         String checkSum = "checkSum";
-
-        this.duplicateFileChecker.isFileInSavedDatasetVersion(datasetVersion, checkSum);
+        
+        assertThrows(NullPointerException.class, () -> this.duplicateFileChecker.isFileInSavedDatasetVersion(datasetVersion, checkSum));
     }
 
-    @Test(expected = NullPointerException.class)
-    public void testIsFileInSavedDatasetVersionWithChecksumParamWithUndefinedChecksum() {
+    @Test
+    void testIsFileInSavedDatasetVersionWithChecksumParamWithUndefinedChecksum() {
         DatasetVersion datasetVersion = new DatasetVersion();
         String checkSum = null;
-
-        this.duplicateFileChecker.isFileInSavedDatasetVersion(datasetVersion, checkSum);
+        
+        assertThrows(NullPointerException.class, () -> this.duplicateFileChecker.isFileInSavedDatasetVersion(datasetVersion, checkSum));
     }
 
     @Test
@@ -81,20 +81,20 @@ public void testIsFileInSavedDatasetVersionWithChecksumParamWithUnsavedFile() {
     // test public boolean isFileInSavedDatasetVersion(DatasetVersion datasetVersion, FileMetadata fileMetadata)
     // ----------------------------------------------------------------------------------------------------------
 
-    @Test(expected = NullPointerException.class)
-    public void testIsFileInSavedDatasetVersionWithFileMetadataParamWithUndefinedDatasetVersion() {
+    @Test
+    void testIsFileInSavedDatasetVersionWithFileMetadataParamWithUndefinedDatasetVersion() {
         DatasetVersion datasetVersion = null;
         FileMetadata fileMetadata = new FileMetadata();
-
-        this.duplicateFileChecker.isFileInSavedDatasetVersion(datasetVersion, fileMetadata);
+        
+        assertThrows(NullPointerException.class, () -> this.duplicateFileChecker.isFileInSavedDatasetVersion(datasetVersion, fileMetadata));
     }
 
-    @Test(expected = NullPointerException.class)
-    public void testIsFileInSavedDatasetVersionWithFileMetadataParamWithUndefinedFileMetadata() {
+    @Test
+    void testIsFileInSavedDatasetVersionWithFileMetadataParamWithUndefinedFileMetadata() {
         DatasetVersion datasetVersion = new DatasetVersion();
         FileMetadata fileMetadata = null;
-
-        this.duplicateFileChecker.isFileInSavedDatasetVersion(datasetVersion, fileMetadata);
+        
+        assertThrows(NullPointerException.class, () -> this.duplicateFileChecker.isFileInSavedDatasetVersion(datasetVersion, fileMetadata));
     }
 
     @Test
diff --git a/src/test/java/edu/harvard/iq/dataverse/datasetutility/OptionalFileParamsTest.java b/src/test/java/edu/harvard/iq/dataverse/datasetutility/OptionalFileParamsTest.java
index f8c790a566b..c9f251f7e77 100644
--- a/src/test/java/edu/harvard/iq/dataverse/datasetutility/OptionalFileParamsTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/datasetutility/OptionalFileParamsTest.java
@@ -10,19 +10,14 @@
 import edu.harvard.iq.dataverse.DataFileCategory;
 import edu.harvard.iq.dataverse.DataFileTag;
 import edu.harvard.iq.dataverse.FileMetadata;
-import java.util.ArrayList;
+
 import java.util.Arrays;
 import java.util.List;
-import java.util.ResourceBundle;
 
 import edu.harvard.iq.dataverse.util.BundleUtil;
-import org.hamcrest.Matchers;
-import org.junit.After;
-import org.junit.AfterClass;
-import org.junit.Before;
-import org.junit.BeforeClass;
-import org.junit.Test;
-import static org.junit.Assert.*;
+import org.junit.jupiter.api.Test;
+
+import static org.junit.jupiter.api.Assertions.*;
 
 /**
  * 
diff --git a/src/test/java/edu/harvard/iq/dataverse/datavariable/VariableMetadataDDIParserTest.java b/src/test/java/edu/harvard/iq/dataverse/datavariable/VariableMetadataDDIParserTest.java
index 470338d0462..bfb9134cfca 100644
--- a/src/test/java/edu/harvard/iq/dataverse/datavariable/VariableMetadataDDIParserTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/datavariable/VariableMetadataDDIParserTest.java
@@ -1,6 +1,6 @@
 package edu.harvard.iq.dataverse.datavariable;
 
-import org.junit.Test;
+import org.junit.jupiter.api.Test;
 
 import javax.xml.stream.XMLInputFactory;
 import javax.xml.stream.XMLStreamException;
@@ -12,7 +12,7 @@
 import java.util.HashSet;
 import java.util.Collection;
 
-import static org.junit.Assert.*;
+import static org.junit.jupiter.api.Assertions.*;
 
 public class VariableMetadataDDIParserTest {
 
diff --git a/src/test/java/edu/harvard/iq/dataverse/dataverse/DataverseUtilTest.java b/src/test/java/edu/harvard/iq/dataverse/dataverse/DataverseUtilTest.java
index bf679f8fe97..01e0edd3073 100644
--- a/src/test/java/edu/harvard/iq/dataverse/dataverse/DataverseUtilTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/dataverse/DataverseUtilTest.java
@@ -4,15 +4,17 @@
 import edu.harvard.iq.dataverse.Dataverse;
 import edu.harvard.iq.dataverse.DvObjectContainer;
 import edu.harvard.iq.dataverse.mocks.MocksFactory;
-import static org.junit.Assert.assertEquals;
+
+import static org.junit.jupiter.api.Assertions.assertDoesNotThrow;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertThrows;
 
 import java.util.HashMap;
 import java.util.Map;
 
-import javax.ws.rs.BadRequestException;
+import jakarta.ws.rs.BadRequestException;
 
-import org.junit.Assert;
-import org.junit.Test;
+import org.junit.jupiter.api.Test;
 
 public class DataverseUtilTest {
 
@@ -42,69 +44,30 @@ public void testCheckMetadataLanguageCases() {
         definedFrenchD.setMetadataLanguage("fr");
         Dataset definedSpanishD = new Dataset();
         definedSpanishD.setMetadataLanguage("es");
+
         // Not set tests:
         //Good - no mLang sent, parent doesn't have one
-        try {
-            DataverseUtil.checkMetadataLangauge(undefinedD, undefinedParent, emptyMLangSettingMap);
-        } catch (BadRequestException e) {
-            Assert.fail();
-        }
+        assertDoesNotThrow(() -> DataverseUtil.checkMetadataLangauge(undefinedD, undefinedParent, emptyMLangSettingMap));
         //Bad - one sent, parent doesn't have one
-        try {
-            DataverseUtil.checkMetadataLangauge(definedEnglishD, undefinedParent, emptyMLangSettingMap);
-            Assert.fail();
-        } catch (BadRequestException e) {
-        }
+        assertThrows(BadRequestException.class, () -> DataverseUtil.checkMetadataLangauge(definedEnglishD, undefinedParent, emptyMLangSettingMap));
         //Good - one sent, matches parent
-        try {
-            DataverseUtil.checkMetadataLangauge(definedEnglishD, definedParent, emptyMLangSettingMap);
-
-        } catch (BadRequestException e) {
-            Assert.fail();
-        }
+        assertDoesNotThrow(() -> DataverseUtil.checkMetadataLangauge(definedEnglishD, definedParent, emptyMLangSettingMap));
         //Bad - one sent, doesn't match parent
-        try {
-            DataverseUtil.checkMetadataLangauge(definedFrenchD, definedParent, emptyMLangSettingMap);
-            Assert.fail();
-        } catch (BadRequestException e) {
-        }
+        assertThrows(BadRequestException.class, () -> DataverseUtil.checkMetadataLangauge(definedFrenchD, definedParent, emptyMLangSettingMap));
+
         //With setting tests
-      //Bad - one sent, parent doesn't have one
-        try {
-            DataverseUtil.checkMetadataLangauge(undefinedD, undefinedParent, mLangSettingMap);
-            Assert.fail();
-        } catch (BadRequestException e) {
-        }
+        //Bad - one sent, parent doesn't have one
+        assertThrows(BadRequestException.class, () -> DataverseUtil.checkMetadataLangauge(undefinedD, undefinedParent, mLangSettingMap));
         //Good - sent, parent undefined, is allowed by setting
-        try {
-            DataverseUtil.checkMetadataLangauge(definedEnglishD, undefinedParent, mLangSettingMap);
-        } catch (BadRequestException e) {
-            Assert.fail();
-        }
+        assertDoesNotThrow(() -> DataverseUtil.checkMetadataLangauge(definedEnglishD, undefinedParent, mLangSettingMap));
         //Bad  one sent, parent undefined, not allowed by setting
-        try {
-            DataverseUtil.checkMetadataLangauge(definedSpanishD, undefinedParent, mLangSettingMap);
-            Assert.fail();
-        } catch (BadRequestException e) {
-        }
+        assertThrows(BadRequestException.class, () -> DataverseUtil.checkMetadataLangauge(definedSpanishD, undefinedParent, mLangSettingMap));
         //Bad - one sent, doesn't match parent
-        try {
-            DataverseUtil.checkMetadataLangauge(definedFrenchD, definedParent, mLangSettingMap);
-            Assert.fail();
-        } catch (BadRequestException e) {
-        }
+        assertThrows(BadRequestException.class, () -> DataverseUtil.checkMetadataLangauge(definedFrenchD, definedParent, mLangSettingMap));
         //Bad - undefined sent, parent is defined
-        try {
-            DataverseUtil.checkMetadataLangauge(undefinedD, definedParent, mLangSettingMap);
-            Assert.fail();
-        } catch (BadRequestException e) {
-        }
-      //Good - sent, parent defined, they match
-        try {
-            DataverseUtil.checkMetadataLangauge(definedEnglishD, definedParent, mLangSettingMap);
-        } catch (BadRequestException e) {
-            Assert.fail();
-        }
+        assertThrows(BadRequestException.class, () -> DataverseUtil.checkMetadataLangauge(undefinedD, definedParent, mLangSettingMap));
+        //Good - sent, parent defined, they match
+        assertDoesNotThrow(() -> DataverseUtil.checkMetadataLangauge(definedEnglishD, definedParent, mLangSettingMap));
     }
 
 }
diff --git a/src/test/java/edu/harvard/iq/dataverse/engine/PermissionTest.java b/src/test/java/edu/harvard/iq/dataverse/engine/PermissionTest.java
index ec300a25db7..4b98d3d9850 100644
--- a/src/test/java/edu/harvard/iq/dataverse/engine/PermissionTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/engine/PermissionTest.java
@@ -5,8 +5,9 @@
 import edu.harvard.iq.dataverse.Dataset;
 import edu.harvard.iq.dataverse.Dataverse;
 import edu.harvard.iq.dataverse.DvObject;
-import org.junit.Test;
-import static org.junit.Assert.*;
+import org.junit.jupiter.api.Test;
+
+import static org.junit.jupiter.api.Assertions.*;
 
 /**
  *
diff --git a/src/test/java/edu/harvard/iq/dataverse/engine/TestCommandContext.java b/src/test/java/edu/harvard/iq/dataverse/engine/TestCommandContext.java
index 410f0489613..99da9198296 100644
--- a/src/test/java/edu/harvard/iq/dataverse/engine/TestCommandContext.java
+++ b/src/test/java/edu/harvard/iq/dataverse/engine/TestCommandContext.java
@@ -23,7 +23,7 @@
 import edu.harvard.iq.dataverse.util.SystemConfig;
 import edu.harvard.iq.dataverse.workflow.WorkflowServiceBean;
 import java.util.Stack;
-import javax.persistence.EntityManager;
+import jakarta.persistence.EntityManager;
 
 /**
  * A base CommandContext for tests. Provides no-op implementations. Should
diff --git a/src/test/java/edu/harvard/iq/dataverse/engine/TestEntityManager.java b/src/test/java/edu/harvard/iq/dataverse/engine/TestEntityManager.java
index b1ad74ac2dc..af8b75d5d80 100644
--- a/src/test/java/edu/harvard/iq/dataverse/engine/TestEntityManager.java
+++ b/src/test/java/edu/harvard/iq/dataverse/engine/TestEntityManager.java
@@ -14,21 +14,21 @@
 import java.util.concurrent.atomic.AtomicLong;
 import java.util.logging.Level;
 import java.util.logging.Logger;
-import javax.persistence.EntityGraph;
-import javax.persistence.EntityManager;
-import javax.persistence.EntityManagerFactory;
-import javax.persistence.EntityTransaction;
-import javax.persistence.FlushModeType;
-import javax.persistence.Id;
-import javax.persistence.LockModeType;
-import javax.persistence.Query;
-import javax.persistence.StoredProcedureQuery;
-import javax.persistence.TypedQuery;
-import javax.persistence.criteria.CriteriaBuilder;
-import javax.persistence.criteria.CriteriaDelete;
-import javax.persistence.criteria.CriteriaQuery;
-import javax.persistence.criteria.CriteriaUpdate;
-import javax.persistence.metamodel.Metamodel;
+import jakarta.persistence.EntityGraph;
+import jakarta.persistence.EntityManager;
+import jakarta.persistence.EntityManagerFactory;
+import jakarta.persistence.EntityTransaction;
+import jakarta.persistence.FlushModeType;
+import jakarta.persistence.Id;
+import jakarta.persistence.LockModeType;
+import jakarta.persistence.Query;
+import jakarta.persistence.StoredProcedureQuery;
+import jakarta.persistence.TypedQuery;
+import jakarta.persistence.criteria.CriteriaBuilder;
+import jakarta.persistence.criteria.CriteriaDelete;
+import jakarta.persistence.criteria.CriteriaQuery;
+import jakarta.persistence.criteria.CriteriaUpdate;
+import jakarta.persistence.metamodel.Metamodel;
 
 /**
  *
diff --git a/src/test/java/edu/harvard/iq/dataverse/engine/command/impl/AbstractDatasetCommandTest.java b/src/test/java/edu/harvard/iq/dataverse/engine/command/impl/AbstractDatasetCommandTest.java
index a87de12cfe0..efadd14438a 100644
--- a/src/test/java/edu/harvard/iq/dataverse/engine/command/impl/AbstractDatasetCommandTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/engine/command/impl/AbstractDatasetCommandTest.java
@@ -5,10 +5,12 @@
 import edu.harvard.iq.dataverse.engine.command.CommandContext;
 import edu.harvard.iq.dataverse.engine.command.DataverseRequest;
 import edu.harvard.iq.dataverse.engine.command.exception.CommandException;
+import edu.harvard.iq.dataverse.engine.command.exception.IllegalCommandException;
 import edu.harvard.iq.dataverse.mocks.MocksFactory;
 import static edu.harvard.iq.dataverse.mocks.MocksFactory.*;
-import org.junit.Test;
-import static org.junit.Assert.*;
+import org.junit.jupiter.api.Test;
+
+import static org.junit.jupiter.api.Assertions.*;
 
 /**
  *
@@ -17,16 +19,18 @@
 public class AbstractDatasetCommandTest {
     
    
-    @Test(expected=IllegalArgumentException.class)
-    @SuppressWarnings("ResultOfObjectAllocationIgnored")
-    public void testNullDataset() {
-        new AbstractDatasetCommandImpl(makeRequest(), null);
+    @Test
+    void testNullDataset() {
+        DataverseRequest request = makeRequest();
+        assertThrows(IllegalArgumentException.class, () -> new AbstractDatasetCommandImpl(request, null));
     }
     
-    @Test(expected=IllegalArgumentException.class)
-    @SuppressWarnings("ResultOfObjectAllocationIgnored")
-    public void testNullDatasetNonNullParent() {
-        new AbstractDatasetCommandImpl(makeRequest(), null, makeDataverse());
+    @Test
+    void testNullDatasetNonNullParent() {
+        DataverseRequest request = makeRequest();
+        Dataverse dataverse = makeDataverse();
+        assertThrows(IllegalArgumentException.class,
+            () -> new AbstractDatasetCommandImpl(request, null, dataverse));
     }
     
     /**
diff --git a/src/test/java/edu/harvard/iq/dataverse/engine/command/impl/CreateDatasetVersionCommandTest.java b/src/test/java/edu/harvard/iq/dataverse/engine/command/impl/CreateDatasetVersionCommandTest.java
index dd8901a05dc..a2d9cdfb917 100644
--- a/src/test/java/edu/harvard/iq/dataverse/engine/command/impl/CreateDatasetVersionCommandTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/engine/command/impl/CreateDatasetVersionCommandTest.java
@@ -16,9 +16,11 @@
 import java.util.HashMap;
 import java.util.Map;
 import java.util.Set;
-import org.junit.Test;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertTrue;
+import org.junit.jupiter.api.Test;
+
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertThrows;
+import static org.junit.jupiter.api.Assertions.assertTrue;
 
 /**
  *
@@ -68,8 +70,8 @@ public void testSimpleVersionAddition() throws Exception {
         assertEquals(expected, testEngine.getReqiredPermissionsForObjects() );
     }
     
-    @Test(expected=IllegalCommandException.class)
-    public void testCantCreateTwoDraftVersions() throws Exception {
+    @Test
+    void testCantCreateTwoDraftVersions() {
         DatasetVersion dsvNew = new DatasetVersion();
         dsvNew.setVersionState(DatasetVersion.VersionState.DRAFT);
         Dataset sampleDataset = makeDataset();
@@ -87,7 +89,7 @@ public DatasetServiceBean datasets() {
             
         });
         
-        testEngine.submit(sut);
+        assertThrows(IllegalCommandException.class, () -> testEngine.submit(sut));
     }
     
     
diff --git a/src/test/java/edu/harvard/iq/dataverse/engine/command/impl/CreateDataverseCommandTest.java b/src/test/java/edu/harvard/iq/dataverse/engine/command/impl/CreateDataverseCommandTest.java
index bcbc12d5d4e..7e84cf19e6b 100644
--- a/src/test/java/edu/harvard/iq/dataverse/engine/command/impl/CreateDataverseCommandTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/engine/command/impl/CreateDataverseCommandTest.java
@@ -15,14 +15,15 @@
 import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser;
 import edu.harvard.iq.dataverse.authorization.users.GuestUser;
 import edu.harvard.iq.dataverse.search.IndexServiceBean;
-import org.junit.Before;
-import org.junit.Test;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
 import static edu.harvard.iq.dataverse.mocks.MocksFactory.*;
 import edu.harvard.iq.dataverse.engine.TestCommandContext;
 import edu.harvard.iq.dataverse.engine.TestDataverseEngine;
 import edu.harvard.iq.dataverse.engine.command.DataverseRequest;
 import edu.harvard.iq.dataverse.engine.command.exception.CommandException;
 import edu.harvard.iq.dataverse.engine.command.exception.IllegalCommandException;
+
 import java.sql.Timestamp;
 import java.util.ArrayList;
 import java.util.Arrays;
@@ -31,9 +32,9 @@
 import java.util.List;
 import java.util.Map;
 import java.util.concurrent.Future;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertNotNull;
-import static org.junit.Assert.assertTrue;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.*;
+import static org.junit.jupiter.api.Assertions.assertTrue;
 
 /**
  *
@@ -164,7 +165,7 @@ public void deleteFacetsFor(Dataverse d) {
     TestDataverseEngine engine;
     
     
-    @Before
+    @BeforeEach
     public void setUp() {
         indexCalled = false;
         dvStore.clear();
@@ -232,8 +233,8 @@ public void testDefaultOptions() throws CommandException {
         
         // The following is a pretty wierd way to test that the create date defaults to 
         // now, but it works across date changes.
-        assertTrue( "When the supplied creation date is null, date shuld default to command execution time",
-                        Math.abs(System.currentTimeMillis() - result.getCreateDate().toInstant().toEpochMilli()) < 1000 );
+        assertTrue(Math.abs(System.currentTimeMillis() - result.getCreateDate().toInstant().toEpochMilli()) < 1000,
+            "When the supplied creation date is null, date should default to command execution time");
         
         assertTrue( result.isPermissionRoot() );
         assertTrue( result.isThemeRoot() );
@@ -298,31 +299,38 @@ public void testCustomOptions() throws CommandException {
         }
     }
     
-    @Test( expected=IllegalCommandException.class )
-    public void testCantCreateAdditionalRoot() throws Exception {
-        engine.submit( new CreateDataverseCommand(makeDataverse(), makeRequest(), null, null) );
+    @Test
+    void testCantCreateAdditionalRoot() {
+        assertThrows(IllegalCommandException.class,
+            () -> engine.submit( new CreateDataverseCommand(makeDataverse(), makeRequest(), null, null) )
+        );
     }
     
-    @Test( expected=IllegalCommandException.class )
-    public void testGuestCantCreateDataverse() throws Exception {
+    @Test
+    void testGuestCantCreateDataverse() {
         final DataverseRequest request = new DataverseRequest( GuestUser.get(), IpAddress.valueOf("::") );
         isRootDvExists = false;
-        engine.submit(new CreateDataverseCommand(makeDataverse(), request, null, null) );
+        assertThrows(IllegalCommandException.class,
+            () -> engine.submit(new CreateDataverseCommand(makeDataverse(), request, null, null) )
+        );
     }
 
-    @Test( expected=IllegalCommandException.class )
-    public void testCantCreateAnotherWithSameAlias() throws Exception {
+    @Test
+    void testCantCreateAnotherWithSameAlias() {
         
         String alias = "alias";
         final Dataverse dvFirst = makeDataverse();
         dvFirst.setAlias(alias);
         dvFirst.setOwner( makeDataverse() );
-        engine.submit(new CreateDataverseCommand(dvFirst, makeRequest(), null, null) );
+        assertDoesNotThrow(
+            () -> engine.submit(new CreateDataverseCommand(dvFirst, makeRequest(), null, null) ));
         
         final Dataverse dv = makeDataverse();
         dv.setOwner( makeDataverse() );
         dv.setAlias(alias);
-        engine.submit(new CreateDataverseCommand(dv, makeRequest(), null, null) );
+        assertThrows(IllegalCommandException.class,
+            () -> engine.submit(new CreateDataverseCommand(dv, makeRequest(), null, null) )
+        );
     }
     
 }
diff --git a/src/test/java/edu/harvard/iq/dataverse/engine/command/impl/CreatePrivateUrlCommandTest.java b/src/test/java/edu/harvard/iq/dataverse/engine/command/impl/CreatePrivateUrlCommandTest.java
index aafad58654e..33f9acd0e1a 100644
--- a/src/test/java/edu/harvard/iq/dataverse/engine/command/impl/CreatePrivateUrlCommandTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/engine/command/impl/CreatePrivateUrlCommandTest.java
@@ -14,12 +14,13 @@
 import edu.harvard.iq.dataverse.util.SystemConfig;
 import java.util.ArrayList;
 import java.util.List;
-import org.junit.Before;
-import org.junit.Test;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertNotNull;
-import static org.junit.Assert.assertNull;
-import static org.junit.Assert.assertTrue;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
+
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.*;
+import static org.junit.jupiter.api.Assertions.assertNull;
+import static org.junit.jupiter.api.Assertions.assertTrue;
 
 public class CreatePrivateUrlCommandTest {
 
@@ -31,7 +32,7 @@ public class CreatePrivateUrlCommandTest {
     private final Long versionIsReleased = 4l;
     
     
-    @Before
+    @BeforeEach
     public void setUp() {
         dataset = new Dataset();
         testEngine = new TestDataverseEngine(new TestCommandContext() {
diff --git a/src/test/java/edu/harvard/iq/dataverse/engine/command/impl/CreateRoleCommandTest.java b/src/test/java/edu/harvard/iq/dataverse/engine/command/impl/CreateRoleCommandTest.java
index 243285e69ab..3d947879e56 100644
--- a/src/test/java/edu/harvard/iq/dataverse/engine/command/impl/CreateRoleCommandTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/engine/command/impl/CreateRoleCommandTest.java
@@ -14,12 +14,15 @@
 import edu.harvard.iq.dataverse.engine.command.exception.CommandException;
 import edu.harvard.iq.dataverse.engine.command.exception.IllegalCommandException;
 import edu.harvard.iq.dataverse.mocks.MocksFactory;
-import javax.persistence.EntityManager;
-import javax.persistence.TypedQuery;
-import static org.junit.Assert.assertTrue;
-import org.junit.Before;
-import org.junit.Test;
-import org.mockito.Matchers;
+import jakarta.persistence.EntityManager;
+import jakarta.persistence.TypedQuery;
+
+import static org.junit.jupiter.api.Assertions.assertThrows;
+import static org.junit.jupiter.api.Assertions.assertTrue;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
+import org.mockito.ArgumentMatchers;
+
 import static org.mockito.Mockito.mock;
 import static org.mockito.Mockito.when;
 
@@ -50,13 +53,13 @@ public EntityManager em() {
         }
     });
     
-    @Before
+    @BeforeEach
     public void before() {
         saveCalled = false;
     }
     
-    @Test( expected = IllegalCommandException.class )
-    public void testNonSuperUsersCantAddRoles() throws CommandException {
+    @Test
+    void testNonSuperUsersCantAddRoles() {
         DataverseRole dvr = new DataverseRole();
         dvr.setAlias("roleTest");
         dvr.setName("Tester Role");
@@ -69,8 +72,7 @@ public void testNonSuperUsersCantAddRoles() throws CommandException {
         normalUser.setSuperuser(false);
         
         CreateRoleCommand sut = new CreateRoleCommand(dvr, new DataverseRequest(normalUser,IpAddress.valueOf("89.17.33.33")), dv);
-        engine.submit(sut);
-    
+        assertThrows(IllegalCommandException.class, () -> engine.submit(sut));
     }
    
     @Test
@@ -88,12 +90,12 @@ public void testSuperUsersAddRoles() throws CommandException {
         
         CreateRoleCommand sut = new CreateRoleCommand(dvr, new DataverseRequest(normalUser,IpAddress.valueOf("89.17.33.33")), dv);
         engine.submit(sut);
-        assertTrue( "CreateRoleCommand did not call save on the created role.", saveCalled );
+        assertTrue(saveCalled, "CreateRoleCommand did not call save on the created role.");
     
     }
     
-    @Test( expected = IllegalCommandException.class )
-    public void testGuestUsersCantAddRoles() throws CommandException {
+    @Test
+    void testGuestUsersCantAddRoles() {
         DataverseRole dvr = new DataverseRole();
         dvr.setAlias("roleTest");
         dvr.setName("Tester Role");
@@ -103,7 +105,7 @@ public void testGuestUsersCantAddRoles() throws CommandException {
         dvr.setOwner(dv);
         
         CreateRoleCommand sut = new CreateRoleCommand(dvr, new DataverseRequest(GuestUser.get(),IpAddress.valueOf("89.17.33.33")), dv);
-        engine.submit(sut);    
+        assertThrows(IllegalCommandException.class, () -> engine.submit(sut));
     }
     
     private class LocalTestEntityManager extends TestEntityManager {
@@ -128,7 +130,7 @@ public <T> TypedQuery<T> createNamedQuery(String name, Class<T> resultClass) {
             //Mocking a query to return no results when 
             //checking for existing role in DB
             TypedQuery mockedQuery = mock(TypedQuery.class);
-            when(mockedQuery.setParameter(Matchers.anyString(), Matchers.anyObject())).thenReturn(mockedQuery);
+            when(mockedQuery.setParameter(ArgumentMatchers.anyString(), ArgumentMatchers.any())).thenReturn(mockedQuery);
             when(mockedQuery.getSingleResult()).thenReturn(null);
             return mockedQuery;
         }
diff --git a/src/test/java/edu/harvard/iq/dataverse/engine/command/impl/DRSSubmitToArchiveCommandTest.java b/src/test/java/edu/harvard/iq/dataverse/engine/command/impl/DRSSubmitToArchiveCommandTest.java
index a0e79268e3d..70e65bfe34c 100644
--- a/src/test/java/edu/harvard/iq/dataverse/engine/command/impl/DRSSubmitToArchiveCommandTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/engine/command/impl/DRSSubmitToArchiveCommandTest.java
@@ -1,19 +1,20 @@
 package edu.harvard.iq.dataverse.engine.command.impl;
 
 import org.erdtman.jcs.JsonCanonicalizer;
-import org.junit.Assert;
-import org.junit.Test;
+import org.junit.jupiter.api.Test;
 import com.auth0.jwt.JWT;
 import com.auth0.jwt.algorithms.Algorithm;
 import com.auth0.jwt.interfaces.DecodedJWT;
 
 import edu.harvard.iq.dataverse.engine.command.exception.CommandException;
+
 import java.security.KeyFactory;
 import java.security.interfaces.RSAPrivateKey;
 //import java.security.interfaces.RSAPublicKey;
 import java.security.spec.PKCS8EncodedKeySpec;
 import java.util.Base64;
 
+import static org.junit.jupiter.api.Assertions.fail;
 
 public class DRSSubmitToArchiveCommandTest {
 
@@ -113,7 +114,7 @@ public void createJWT() throws CommandException {
             System.out.println(e.getClass() + e.getLocalizedMessage());
             e.printStackTrace();
             //Any exception is a failure, otherwise decoding worked.
-            Assert.fail(e.getLocalizedMessage());
+            fail(e.getLocalizedMessage());
         }
 
     }
diff --git a/src/test/java/edu/harvard/iq/dataverse/engine/command/impl/DeletePrivateUrlCommandTest.java b/src/test/java/edu/harvard/iq/dataverse/engine/command/impl/DeletePrivateUrlCommandTest.java
index 74c8c269b4b..0a4e5ed2d7e 100644
--- a/src/test/java/edu/harvard/iq/dataverse/engine/command/impl/DeletePrivateUrlCommandTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/engine/command/impl/DeletePrivateUrlCommandTest.java
@@ -13,10 +13,11 @@
 import edu.harvard.iq.dataverse.privateurl.PrivateUrlServiceBean;
 import java.util.ArrayList;
 import java.util.List;
-import org.junit.Before;
-import org.junit.Test;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertNull;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
+
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.*;
 
 public class DeletePrivateUrlCommandTest {
 
@@ -25,7 +26,7 @@ public class DeletePrivateUrlCommandTest {
     private final Long noPrivateUrlToDelete = 1l;
     private final Long hasPrivateUrlToDelete = 2l;
 
-    @Before
+    @BeforeEach
     public void setUp() {
         testEngine = new TestDataverseEngine(new TestCommandContext() {
             @Override
diff --git a/src/test/java/edu/harvard/iq/dataverse/engine/command/impl/GetLatestPublishedDatasetVersionCommandTest.java b/src/test/java/edu/harvard/iq/dataverse/engine/command/impl/GetLatestPublishedDatasetVersionCommandTest.java
index 24c48fd257b..2c9f050b92f 100644
--- a/src/test/java/edu/harvard/iq/dataverse/engine/command/impl/GetLatestPublishedDatasetVersionCommandTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/engine/command/impl/GetLatestPublishedDatasetVersionCommandTest.java
@@ -9,8 +9,9 @@
 import edu.harvard.iq.dataverse.engine.command.exception.CommandException;
 import java.util.ArrayList;
 import java.util.List;
-import org.junit.Test;
-import static org.junit.Assert.*;
+import org.junit.jupiter.api.Test;
+
+import static org.junit.jupiter.api.Assertions.*;
 import static edu.harvard.iq.dataverse.mocks.MocksFactory.makeRequest;
 
 /**
@@ -28,9 +29,9 @@ public void testLatestPublishedNoDraft() throws CommandException {
         List<DatasetVersion> versions = make10Versions(ds);
         ds.setVersions(versions);
         
-        assertEquals( 10l, engine.submit(new GetLatestPublishedDatasetVersionCommand(makeRequest(), ds)).getVersionNumber().longValue() );
-        assertTrue( "Published datasets should require no permissions to view",
-                        engine.getReqiredPermissionsForObjects().get(ds).isEmpty() );
+        assertEquals(10L, engine.submit(new GetLatestPublishedDatasetVersionCommand(makeRequest(), ds)).getVersionNumber().longValue());
+        assertTrue(engine.getReqiredPermissionsForObjects().get(ds).isEmpty(),
+            "Published datasets should require no permissions to view");
     }
     
     @Test
@@ -41,9 +42,9 @@ public void testLatestPublishedWithDraft() throws CommandException {
         versions.add( MocksFactory.makeDatasetVersion(ds.getCategories()) );
         ds.setVersions(versions);
         
-        assertEquals( 10l, engine.submit(new GetLatestPublishedDatasetVersionCommand(makeRequest(), ds)).getVersionNumber().longValue() );
-        assertTrue( "Published datasets should require no permissions to view",
-                        engine.getReqiredPermissionsForObjects().get(ds).isEmpty() );
+        assertEquals(10L, engine.submit(new GetLatestPublishedDatasetVersionCommand(makeRequest(), ds)).getVersionNumber().longValue());
+        assertTrue(engine.getReqiredPermissionsForObjects().get(ds).isEmpty(),
+            "Published datasets should require no permissions to view");
     }
     
     @Test
diff --git a/src/test/java/edu/harvard/iq/dataverse/engine/command/impl/GetPrivateUrlCommandTest.java b/src/test/java/edu/harvard/iq/dataverse/engine/command/impl/GetPrivateUrlCommandTest.java
index b5019807ac1..47174643a1c 100644
--- a/src/test/java/edu/harvard/iq/dataverse/engine/command/impl/GetPrivateUrlCommandTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/engine/command/impl/GetPrivateUrlCommandTest.java
@@ -5,13 +5,12 @@
 import edu.harvard.iq.dataverse.engine.TestDataverseEngine;
 import edu.harvard.iq.dataverse.privateurl.PrivateUrl;
 import edu.harvard.iq.dataverse.privateurl.PrivateUrlServiceBean;
-import org.junit.After;
-import org.junit.AfterClass;
-import org.junit.Assert;
-import static org.junit.Assert.assertNull;
-import org.junit.Before;
-import org.junit.BeforeClass;
-import org.junit.Test;
+import org.junit.jupiter.api.AfterAll;
+import org.junit.jupiter.api.AfterEach;
+import static org.junit.jupiter.api.Assertions.assertNull;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.Test;
 
 public class GetPrivateUrlCommandTest {
 
@@ -21,15 +20,15 @@ public class GetPrivateUrlCommandTest {
     public GetPrivateUrlCommandTest() {
     }
 
-    @BeforeClass
+    @BeforeAll
     public static void setUpClass() {
     }
 
-    @AfterClass
+    @AfterAll
     public static void tearDownClass() {
     }
 
-    @Before
+    @BeforeEach
     public void setUp() {
         testEngine = new TestDataverseEngine(new TestCommandContext() {
 
@@ -48,7 +47,7 @@ public PrivateUrl getPrivateUrlFromDatasetId(long datasetId) {
         });
     }
 
-    @After
+    @AfterEach
     public void tearDown() {
     }
 
diff --git a/src/test/java/edu/harvard/iq/dataverse/engine/command/impl/ListMetadataBlocksCommandTest.java b/src/test/java/edu/harvard/iq/dataverse/engine/command/impl/ListMetadataBlocksCommandTest.java
index 520c91f47ff..0701454113b 100644
--- a/src/test/java/edu/harvard/iq/dataverse/engine/command/impl/ListMetadataBlocksCommandTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/engine/command/impl/ListMetadataBlocksCommandTest.java
@@ -9,8 +9,8 @@
 import edu.harvard.iq.dataverse.mocks.MocksFactory;
 import org.hamcrest.MatcherAssert;
 import org.hamcrest.Matchers;
-import org.junit.Before;
-import org.junit.Test;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
 import org.mockito.Mockito;
 
 import java.util.Arrays;
@@ -26,7 +26,7 @@ public class ListMetadataBlocksCommandTest {
     private Dataverse dataverse;
     private DataverseMetadataBlockFacet metadataBlockFacet;
 
-    @Before
+    @BeforeEach
     public void beforeEachTest() {
         dataverseRequest = Mockito.mock(DataverseRequest.class);
         dataverse = Mockito.mock(Dataverse.class);
diff --git a/src/test/java/edu/harvard/iq/dataverse/engine/command/impl/MoveDatasetCommandTest.java b/src/test/java/edu/harvard/iq/dataverse/engine/command/impl/MoveDatasetCommandTest.java
index 2474fd72004..ed6112539ed 100644
--- a/src/test/java/edu/harvard/iq/dataverse/engine/command/impl/MoveDatasetCommandTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/engine/command/impl/MoveDatasetCommandTest.java
@@ -31,30 +31,30 @@
 import java.util.Date;
 import java.util.List;
 import java.util.Map;
-import java.util.concurrent.Future;
-import javax.persistence.EntityGraph;
-import javax.persistence.EntityManager;
-import javax.persistence.EntityManagerFactory;
-import javax.persistence.EntityTransaction;
-import javax.persistence.FlushModeType;
-import javax.persistence.LockModeType;
-import javax.persistence.Query;
-import javax.persistence.StoredProcedureQuery;
-import javax.persistence.TypedQuery;
-import javax.persistence.criteria.CriteriaBuilder;
-import javax.persistence.criteria.CriteriaDelete;
-import javax.persistence.criteria.CriteriaQuery;
-import javax.persistence.criteria.CriteriaUpdate;
-import javax.persistence.metamodel.Metamodel;
-import javax.servlet.http.HttpServletRequest;
-import javax.ws.rs.core.Context;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertNotNull;
-import static org.junit.Assert.assertNull;
-import static org.junit.Assert.fail;
-import org.junit.Before;
-import org.junit.Ignore;
-import org.junit.Test;
+
+import jakarta.persistence.EntityGraph;
+import jakarta.persistence.EntityManager;
+import jakarta.persistence.EntityManagerFactory;
+import jakarta.persistence.EntityTransaction;
+import jakarta.persistence.FlushModeType;
+import jakarta.persistence.LockModeType;
+import jakarta.persistence.Query;
+import jakarta.persistence.StoredProcedureQuery;
+import jakarta.persistence.TypedQuery;
+import jakarta.persistence.criteria.CriteriaBuilder;
+import jakarta.persistence.criteria.CriteriaDelete;
+import jakarta.persistence.criteria.CriteriaQuery;
+import jakarta.persistence.criteria.CriteriaUpdate;
+import jakarta.persistence.metamodel.Metamodel;
+import jakarta.servlet.http.HttpServletRequest;
+import jakarta.ws.rs.core.Context;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.*;
+import static org.junit.jupiter.api.Assertions.assertNull;
+import static org.junit.jupiter.api.Assertions.fail;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Disabled;
+import org.junit.jupiter.api.Test;
 
 /**
  *
@@ -71,7 +71,7 @@ public class MoveDatasetCommandTest {
         @Context
         protected HttpServletRequest httpRequest;
 	
-    @Before
+    @BeforeEach
     public void setUp() {
 
         auth = makeAuthenticatedUser("Super", "User");
@@ -283,13 +283,11 @@ public void testRemoveGuestbook() throws Exception {
 	 * Moving DS to its owning DV 
         * @throws IllegalCommandException
 	 */
-    @Test(expected = IllegalCommandException.class)
-    public void testInvalidMove() throws Exception {
-
+    @Test
+    void testInvalidMove() {
         DataverseRequest aRequest = new DataverseRequest(auth, httpRequest);
-        testEngine.submit(
-                new MoveDatasetCommand(aRequest, moved, root, false));
-        fail();
+        assertThrows(IllegalCommandException.class,
+            () -> testEngine.submit(new MoveDatasetCommand(aRequest, moved, root, false)));
     }
         
     /**
@@ -301,14 +299,13 @@ public void testInvalidMove() throws Exception {
      * Ignoring after permissions change in 47fb045. Did that change make this
      * case untestable? Unclear.
      */
-    @Ignore
-    @Test(expected = PermissionException.class)
-    public void testAuthenticatedUserWithNoRole() throws Exception {
+    @Disabled("Unstable test. Disabled since #5115 by @pdurbin. See commit 7a917177")
+    @Test
+    void testAuthenticatedUserWithNoRole() {
 
         DataverseRequest aRequest = new DataverseRequest(nobody, httpRequest);
-        testEngine.submit(
-                new MoveDatasetCommand(aRequest, moved, childA, null));
-        fail();
+        assertThrows(PermissionException.class,
+            () -> testEngine.submit(new MoveDatasetCommand(aRequest, moved, childA, null)));
     }
 
     /**
@@ -317,25 +314,23 @@ public void testAuthenticatedUserWithNoRole() throws Exception {
      *
      * @throws java.lang.Exception
      */
-    @Test(expected = PermissionException.class)
-    public void testNotAuthenticatedUser() throws Exception {
+    @Test
+    void testNotAuthenticatedUser() {
 
         DataverseRequest aRequest = new DataverseRequest(GuestUser.get(), httpRequest);
-        testEngine.submit(
-                new MoveDatasetCommand(aRequest, moved, root, null));
-        fail();
+        assertThrows(PermissionException.class,
+            () -> testEngine.submit(new MoveDatasetCommand(aRequest, moved, root, null)));
     }
     
     	/**
 	 * Moving published  DS to unpublished DV
         * @throws IllegalCommandException
 	 */
-    @Test(expected = IllegalCommandException.class)
-    public void testInvalidMovePublishedToUnpublished() throws Exception {
+    @Test
+    void testInvalidMovePublishedToUnpublished() {
         DataverseRequest aRequest = new DataverseRequest(auth, httpRequest);
-        testEngine.submit(
-                new MoveDatasetCommand(aRequest, moved, childDraft, null));
-        fail();
+        assertThrows(IllegalCommandException.class,
+            () -> testEngine.submit(new MoveDatasetCommand(aRequest, moved, childDraft, null)));
     }
          
         
diff --git a/src/test/java/edu/harvard/iq/dataverse/engine/command/impl/MoveDataverseCommandTest.java b/src/test/java/edu/harvard/iq/dataverse/engine/command/impl/MoveDataverseCommandTest.java
index 7a27625f7b8..3c3188da830 100644
--- a/src/test/java/edu/harvard/iq/dataverse/engine/command/impl/MoveDataverseCommandTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/engine/command/impl/MoveDataverseCommandTest.java
@@ -25,14 +25,14 @@
 import java.util.Date;
 import java.util.List;
 import java.util.concurrent.Future;
-import javax.persistence.EntityManager;
-import javax.servlet.http.HttpServletRequest;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertNotNull;
-import static org.junit.Assert.assertNull;
-import static org.junit.Assert.fail;
-import org.junit.Before;
-import org.junit.Test;
+import jakarta.persistence.EntityManager;
+import jakarta.servlet.http.HttpServletRequest;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.*;
+
+import static org.junit.jupiter.api.Assertions.fail;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
 
 /**
  *
@@ -49,7 +49,7 @@ public class MoveDataverseCommandTest {
     AuthenticatedUser auth, nobody;
     protected HttpServletRequest httpRequest;
 
-    @Before
+    @BeforeEach
     public void setUp() {
         // authentication 
         auth = makeAuthenticatedUser("Super", "User");
@@ -302,43 +302,39 @@ public void testValidMove() throws Exception {
     /**
      * Moving ChildA to its child (illegal).
      */
-    @Test( expected=IllegalCommandException.class )
-    public void testInvalidMove() throws Exception {
+    @Test
+    void testInvalidMove() {
         System.out.println("testInvalidMove");
         DataverseRequest aRequest = new DataverseRequest(auth, httpRequest);
-        testEngine.submit(
-                        new MoveDataverseCommand(aRequest, childA, grandchildAA, null));
-        fail();
+        assertThrows(IllegalCommandException.class,
+            () -> testEngine.submit(new MoveDataverseCommand(aRequest, childA, grandchildAA, null)));
     }
     
     /**
      * Calling API as a non super user (illegal).
      */
-    @Test(expected = PermissionException.class)
-    public void testNotSuperUser() throws Exception {
+    @Test
+    void testNotSuperUser() {
         System.out.println("testNotSuperUser");
         DataverseRequest aRequest = new DataverseRequest(nobody, httpRequest);
-        testEngine.submit(
-                        new MoveDataverseCommand(aRequest, childB, childA, null));
-        fail();
+        assertThrows(PermissionException.class,
+            () -> testEngine.submit(new MoveDataverseCommand(aRequest, childB, childA, null)));
     }
     
-    @Test( expected=IllegalCommandException.class )
-    public void testMoveIntoSelf() throws Exception {
+    @Test
+    void testMoveIntoSelf() {
         System.out.println("testMoveIntoSelf");
         DataverseRequest aRequest = new DataverseRequest(auth, httpRequest);
-        testEngine.submit(
-                        new MoveDataverseCommand(aRequest, childB, childB, null));
-        fail();
+        assertThrows(IllegalCommandException.class,
+            () -> testEngine.submit(new MoveDataverseCommand(aRequest, childB, childB, null)));
     }
     
-    @Test( expected=IllegalCommandException.class )
-    public void testMoveIntoParent() throws Exception {
+    @Test
+    void testMoveIntoParent() {
         System.out.println("testMoveIntoParent");
         DataverseRequest aRequest = new DataverseRequest(auth, httpRequest);
-        testEngine.submit(
-                        new MoveDataverseCommand(aRequest, grandchildAA, childA, null));
-        fail();
+        assertThrows(IllegalCommandException.class,
+            () -> testEngine.submit(new MoveDataverseCommand(aRequest, grandchildAA, childA, null)));
     }
     
     @Test
@@ -355,13 +351,12 @@ public void testKeepGuestbook() throws Exception {
         assertEquals( root, childC.getOwner() );
     }
     
-    @Test(expected = IllegalCommandException.class)
-    public void testRemoveGuestbookWithoutForce() throws Exception {
+    @Test
+    void testRemoveGuestbookWithoutForce() {
         System.out.println("testRemoveGuestbookWithoutForce");
         DataverseRequest aRequest = new DataverseRequest(auth, httpRequest);
-        testEngine.submit(
-                        new MoveDataverseCommand(aRequest, grandchildCC, root, null));
-        fail();
+        assertThrows(IllegalCommandException.class,
+            () -> testEngine.submit(new MoveDataverseCommand(aRequest, grandchildCC, root, null)));
     }
     
     @Test
@@ -393,13 +388,12 @@ public void testKeepTemplate() throws Exception {
         
     }
     
-    @Test(expected = IllegalCommandException.class)
-    public void testRemoveTemplateWithoutForce() throws Exception {
+    @Test
+    void testRemoveTemplateWithoutForce() {
         System.out.println("testRemoveTemplateWithoutForce");
         DataverseRequest aRequest = new DataverseRequest(auth, httpRequest);
-        testEngine.submit(
-                        new MoveDataverseCommand(aRequest, grandchildDD, root, null));
-        fail();
+        assertThrows(IllegalCommandException.class,
+            () -> testEngine.submit(new MoveDataverseCommand(aRequest, grandchildDD, root, null)));
     }
     
     @Test
@@ -430,13 +424,12 @@ public void testKeepMetadataBlock() throws Exception {
         assertEquals( root, childE.getOwner() );
     }
     
-    @Test(expected = IllegalCommandException.class)
-    public void testRemoveMetadataBlockWithoutForce() throws Exception {
+    @Test
+    void testRemoveMetadataBlockWithoutForce() {
         System.out.println("testRemoveMetadataBlockWithoutForce");
         DataverseRequest aRequest = new DataverseRequest(auth, httpRequest);
-        testEngine.submit(
-                        new MoveDataverseCommand(aRequest, grandchildEE, root, null));
-        fail();
+        assertThrows(IllegalCommandException.class,
+            () -> testEngine.submit(new MoveDataverseCommand(aRequest, grandchildEE, root, null)));
     }
     
     @Test
diff --git a/src/test/java/edu/harvard/iq/dataverse/engine/command/impl/RequestRsyncScriptCommandTest.java b/src/test/java/edu/harvard/iq/dataverse/engine/command/impl/RequestRsyncScriptCommandTest.java
index 5fdef3ed74c..7609ef17d3e 100644
--- a/src/test/java/edu/harvard/iq/dataverse/engine/command/impl/RequestRsyncScriptCommandTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/engine/command/impl/RequestRsyncScriptCommandTest.java
@@ -9,14 +9,14 @@
 import edu.harvard.iq.dataverse.engine.command.DataverseRequest;
 import edu.harvard.iq.dataverse.mocks.MocksFactory;
 import edu.harvard.iq.dataverse.settings.SettingsServiceBean;
-import static edu.harvard.iq.dataverse.settings.SettingsServiceBean.Key.DataCaptureModuleUrl;
-import javax.servlet.http.HttpServletRequest;
-import org.junit.After;
-import org.junit.AfterClass;
-import org.junit.Before;
-import org.junit.BeforeClass;
-import org.junit.Test;
-import static org.junit.Assert.*;
+import jakarta.servlet.http.HttpServletRequest;
+import org.junit.jupiter.api.AfterAll;
+import org.junit.jupiter.api.AfterEach;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
+
+import static org.junit.jupiter.api.Assertions.*;
 
 public class RequestRsyncScriptCommandTest {
 
@@ -26,15 +26,15 @@ public class RequestRsyncScriptCommandTest {
     public RequestRsyncScriptCommandTest() {
     }
 
-    @BeforeClass
+    @BeforeAll
     public static void setUpClass() {
     }
 
-    @AfterClass
+    @AfterAll
     public static void tearDownClass() {
     }
 
-    @Before
+    @BeforeEach
     public void setUp() {
         testEngine = new TestDataverseEngine(new TestCommandContext() {
 
@@ -66,7 +66,7 @@ public SettingsServiceBean settings() {
         });
     }
 
-    @After
+    @AfterEach
     public void tearDown() {
     }
 
diff --git a/src/test/java/edu/harvard/iq/dataverse/engine/command/impl/RestrictFileCommandTest.java b/src/test/java/edu/harvard/iq/dataverse/engine/command/impl/RestrictFileCommandTest.java
index 7b663389a3a..2b1dbc4c64a 100644
--- a/src/test/java/edu/harvard/iq/dataverse/engine/command/impl/RestrictFileCommandTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/engine/command/impl/RestrictFileCommandTest.java
@@ -18,13 +18,15 @@
 import edu.harvard.iq.dataverse.settings.SettingsServiceBean;
 import java.sql.Timestamp;
 import java.util.Date;
-import org.junit.After;
-import org.junit.AfterClass;
-import static org.junit.Assert.assertEquals;
-import org.junit.Before;
-import org.junit.BeforeClass;
-import org.junit.Test;
-import static org.junit.Assert.assertTrue;
+import org.junit.jupiter.api.AfterEach;
+import org.junit.jupiter.api.AfterAll;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.Test;
+
+import static org.junit.jupiter.api.Assertions.assertTrue;
 
 
 /**
@@ -44,15 +46,15 @@ public class RestrictFileCommandTest {
     public RestrictFileCommandTest() {
     }
     
-    @BeforeClass
+    @BeforeAll
     public static void setUpClass() {
     }
     
-    @AfterClass
+    @AfterAll
     public static void tearDownClass() {
     }
     
-    @Before
+    @BeforeEach
     public void setUp() {
         dataset = makeDataset();
         file = makeDataFile();
@@ -74,7 +76,7 @@ public boolean isTrueForKey(SettingsServiceBean.Key key, boolean defaultValue) {
             
     }
     
-    @After
+    @AfterEach
     public void tearDown() {
     }
         
@@ -247,7 +249,7 @@ public void testUnrestrictUnrestrictedNewFile() throws Exception {
         
     }
 
-    @Test 
+    @Test
     public void testPublicInstall() throws CommandException {
         file.setOwner(dataset);
         String expected = "Restricting files is not permitted on a public installation.";
diff --git a/src/test/java/edu/harvard/iq/dataverse/engine/command/impl/ReturnDatasetToAuthorCommandTest.java b/src/test/java/edu/harvard/iq/dataverse/engine/command/impl/ReturnDatasetToAuthorCommandTest.java
index a57beb172a6..23cc4547bc4 100644
--- a/src/test/java/edu/harvard/iq/dataverse/engine/command/impl/ReturnDatasetToAuthorCommandTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/engine/command/impl/ReturnDatasetToAuthorCommandTest.java
@@ -24,14 +24,14 @@
 import edu.harvard.iq.dataverse.workflows.WorkflowComment;
 import java.util.Collections;
 import java.util.List;
-import java.util.concurrent.Future;
-import javax.persistence.EntityManager;
-import javax.servlet.http.HttpServletRequest;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertNotNull;
-import static org.junit.Assert.assertNull;
-import org.junit.Before;
-import org.junit.Test;
+
+import jakarta.persistence.EntityManager;
+import jakarta.servlet.http.HttpServletRequest;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.*;
+
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
 
 public class ReturnDatasetToAuthorCommandTest {
 
@@ -39,7 +39,7 @@ public class ReturnDatasetToAuthorCommandTest {
     private DataverseRequest dataverseRequest;
     private TestDataverseEngine testEngine;
 
-    @Before
+    @BeforeEach
     public void setUp() {
         dataset = new Dataset();
 
@@ -141,9 +141,10 @@ public List<AuthenticatedUser> getUsersWithPermissionOn(Permission permission, D
             throw new IllegalCommandException("You must enter a reason for returning a dataset to its author.", this);
         }
      */
-    @Test(expected=IllegalArgumentException.class)
-    public void testDatasetNull() throws CommandException {
-        new ReturnDatasetToAuthorCommand(dataverseRequest, null, "");
+    @Test
+    void testDatasetNull() {
+        assertThrows(IllegalArgumentException.class,
+            () -> new ReturnDatasetToAuthorCommand(dataverseRequest, null, ""));
     }
 
     @Test
diff --git a/src/test/java/edu/harvard/iq/dataverse/engine/command/impl/SubmitDatasetForReviewCommandTest.java b/src/test/java/edu/harvard/iq/dataverse/engine/command/impl/SubmitDatasetForReviewCommandTest.java
index e882560c601..700ba332247 100644
--- a/src/test/java/edu/harvard/iq/dataverse/engine/command/impl/SubmitDatasetForReviewCommandTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/engine/command/impl/SubmitDatasetForReviewCommandTest.java
@@ -24,13 +24,13 @@
 import edu.harvard.iq.dataverse.search.IndexServiceBean;
 import java.util.Collections;
 import java.util.List;
-import java.util.concurrent.Future;
-import javax.persistence.EntityManager;
-import javax.servlet.http.HttpServletRequest;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertNotNull;
-import org.junit.Before;
-import org.junit.Test;
+
+import jakarta.persistence.EntityManager;
+import jakarta.servlet.http.HttpServletRequest;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.*;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
 
 public class SubmitDatasetForReviewCommandTest {
 
@@ -38,7 +38,7 @@ public class SubmitDatasetForReviewCommandTest {
     private DataverseRequest dataverseRequest;
     private TestDataverseEngine testEngine;
 
-    @Before
+    @BeforeEach
     public void setUp() {
         dataset = new Dataset();
 
@@ -136,9 +136,10 @@ public List<AuthenticatedUser> getUsersWithPermissionOn(Permission permission, D
         );
     }
 
-    @Test( expected=IllegalArgumentException.class )
-    public void testDatasetNull() {
-        new SubmitDatasetForReviewCommand(dataverseRequest, null);
+    @Test
+    void testDatasetNull() {
+        assertThrows(IllegalArgumentException.class,
+            () -> new SubmitDatasetForReviewCommand(dataverseRequest, null));
     }
     
     @Test
diff --git a/src/test/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateDatasetThumbnailCommandTest.java b/src/test/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateDatasetThumbnailCommandTest.java
index f55e9a2d085..34ea7810574 100644
--- a/src/test/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateDatasetThumbnailCommandTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateDatasetThumbnailCommandTest.java
@@ -9,12 +9,13 @@
 import edu.harvard.iq.dataverse.engine.TestDataverseEngine;
 import edu.harvard.iq.dataverse.engine.command.exception.CommandException;
 import edu.harvard.iq.dataverse.util.SystemConfig;
-import org.junit.After;
-import org.junit.AfterClass;
-import org.junit.Before;
-import org.junit.BeforeClass;
-import org.junit.Test;
-import static org.junit.Assert.*;
+import org.junit.jupiter.api.AfterAll;
+import org.junit.jupiter.api.AfterEach;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
+
+import static org.junit.jupiter.api.Assertions.*;
 
 public class UpdateDatasetThumbnailCommandTest {
 
@@ -26,15 +27,15 @@ public class UpdateDatasetThumbnailCommandTest {
     public UpdateDatasetThumbnailCommandTest() {
     }
 
-    @BeforeClass
+    @BeforeAll
     public static void setUpClass() {
     }
 
-    @AfterClass
+    @AfterAll
     public static void tearDownClass() {
     }
 
-    @Before
+    @BeforeEach
     public void setUp() {
         dataset = new Dataset();
         testEngine = new TestDataverseEngine(new TestCommandContext() {
@@ -86,7 +87,7 @@ public String getDataverseSiteUrl() {
         );
     }
 
-    @After
+    @AfterEach
     public void tearDown() {
     }
 
diff --git a/src/test/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateMetadataBlockFacetRootCommandTest.java b/src/test/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateMetadataBlockFacetRootCommandTest.java
index 711e7881af5..4f6a1a1f678 100644
--- a/src/test/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateMetadataBlockFacetRootCommandTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateMetadataBlockFacetRootCommandTest.java
@@ -8,8 +8,8 @@
 import edu.harvard.iq.dataverse.engine.command.exception.CommandException;
 import org.hamcrest.MatcherAssert;
 import org.hamcrest.Matchers;
-import org.junit.Before;
-import org.junit.Test;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
 import org.mockito.ArgumentCaptor;
 import org.mockito.Mockito;
 
@@ -26,7 +26,7 @@ public class UpdateMetadataBlockFacetRootCommandTest {
     private DataverseRequest dataverseRequest;
     private Dataverse dataverse;
 
-    @Before
+    @BeforeEach
     public void beforeEachTest() {
         dataverseRequest = Mockito.mock(DataverseRequest.class);
         dataverse = Mockito.mock(Dataverse.class);
@@ -43,7 +43,7 @@ public void should_not_update_dataverse_when_root_value_does_not_change() throws
 
         Mockito.verify(dataverse).isMetadataBlockFacetRoot();
         Mockito.verifyNoMoreInteractions(dataverse);
-        Mockito.verifyZeroInteractions(context.dataverses());
+        Mockito.verifyNoInteractions(context.dataverses());
     }
 
     @Test
diff --git a/src/test/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateMetadataBlockFacetsCommandTest.java b/src/test/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateMetadataBlockFacetsCommandTest.java
index 2d64de80f3d..51892f20df3 100644
--- a/src/test/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateMetadataBlockFacetsCommandTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateMetadataBlockFacetsCommandTest.java
@@ -9,8 +9,9 @@
 import edu.harvard.iq.dataverse.mocks.MocksFactory;
 import org.hamcrest.MatcherAssert;
 import org.hamcrest.Matchers;
-import org.junit.Before;
-import org.junit.Test;
+import org.junit.jupiter.api.Assertions;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
 import org.mockito.Mockito;
 
 import java.util.Arrays;
@@ -26,20 +27,20 @@ public class UpdateMetadataBlockFacetsCommandTest {
     private DataverseRequest dataverseRequest;
     private Dataverse dataverse;
 
-    @Before
+    @BeforeEach
     public void beforeEachTest() {
         dataverseRequest = Mockito.mock(DataverseRequest.class);
         dataverse = Mockito.mock(Dataverse.class);
     }
 
-    @Test(expected = IllegalCommandException.class)
-    public void should_throw_IllegalCommandException_when_dataverse_is_not_metadata_facet_root() throws CommandException {
+    @Test
+    void should_throw_IllegalCommandException_when_dataverse_is_not_metadata_facet_root() {
         Mockito.when(dataverse.isMetadataBlockFacetRoot()).thenReturn(false);
 
         UpdateMetadataBlockFacetsCommand target = new UpdateMetadataBlockFacetsCommand(dataverseRequest, dataverse, Collections.emptyList());
 
         CommandContext context = Mockito.mock(CommandContext.class, Mockito.RETURNS_DEEP_STUBS);
-        target.execute(context);
+        Assertions.assertThrows(IllegalCommandException.class, () -> target.execute(context));
     }
 
     @Test
diff --git a/src/test/java/edu/harvard/iq/dataverse/engine/command/impl/UpdatePermissionRootCommandTest.java b/src/test/java/edu/harvard/iq/dataverse/engine/command/impl/UpdatePermissionRootCommandTest.java
index 3dced0aaf05..1a46a8803a6 100644
--- a/src/test/java/edu/harvard/iq/dataverse/engine/command/impl/UpdatePermissionRootCommandTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/engine/command/impl/UpdatePermissionRootCommandTest.java
@@ -7,10 +7,11 @@
 import edu.harvard.iq.dataverse.engine.TestCommandContext;
 import edu.harvard.iq.dataverse.engine.TestDataverseEngine;
 import edu.harvard.iq.dataverse.engine.command.exception.CommandException;
-import org.junit.Before;
-import org.junit.Test;
-import static org.junit.Assert.assertFalse;
-import static org.junit.Assert.assertTrue;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
+
+import static org.junit.jupiter.api.Assertions.*;
+
 
 /**
  *
@@ -22,7 +23,7 @@ public class UpdatePermissionRootCommandTest {
     TestCommandContext testCommandContext;
     boolean serviceBeanCalled;
     
-    @Before
+    @BeforeEach
     public void setUp() {
         mockBean = new DataverseServiceBean() {
             @Override
diff --git a/src/test/java/edu/harvard/iq/dataverse/export/DDIExporterTest.java b/src/test/java/edu/harvard/iq/dataverse/export/DDIExporterTest.java
index 306e2aa0928..0eb231dd866 100644
--- a/src/test/java/edu/harvard/iq/dataverse/export/DDIExporterTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/export/DDIExporterTest.java
@@ -35,8 +35,8 @@
 import java.util.HashSet;
 import java.util.Set;
 import java.util.logging.Logger;
-import javax.json.Json;
-import javax.json.JsonObject;
+import jakarta.json.Json;
+import jakarta.json.JsonObject;
 import static org.junit.jupiter.api.Assertions.assertFalse;
 import static org.junit.jupiter.api.Assertions.assertTrue;
 import org.junit.jupiter.api.BeforeAll;
diff --git a/src/test/java/edu/harvard/iq/dataverse/export/OpenAireExporterTest.java b/src/test/java/edu/harvard/iq/dataverse/export/OpenAireExporterTest.java
index 43a05c9bd0c..2d06436fb33 100644
--- a/src/test/java/edu/harvard/iq/dataverse/export/OpenAireExporterTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/export/OpenAireExporterTest.java
@@ -1,14 +1,11 @@
 package edu.harvard.iq.dataverse.export;
 
-import com.jayway.restassured.path.xml.XmlPath;
-import edu.harvard.iq.dataverse.DatasetVersion;
-import edu.harvard.iq.dataverse.authorization.users.PrivateUrlUser;
-import edu.harvard.iq.dataverse.privateurl.PrivateUrlServiceBean;
+import io.restassured.path.xml.XmlPath;
 import edu.harvard.iq.dataverse.util.xml.XmlPrinter;
 import io.gdcc.spi.export.ExportDataProvider;
 import io.gdcc.spi.export.XMLExporter;
 
-import static org.junit.Assert.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertEquals;
 
 import java.io.ByteArrayInputStream;
 import java.io.ByteArrayOutputStream;
@@ -17,12 +14,12 @@
 import java.io.StringReader;
 import java.nio.file.Files;
 import java.nio.file.Paths;
-import javax.json.Json;
-import javax.json.JsonObject;
-import javax.json.JsonReader;
+import jakarta.json.Json;
+import jakarta.json.JsonObject;
+import jakarta.json.JsonReader;
 import javax.xml.parsers.DocumentBuilder;
 import javax.xml.parsers.DocumentBuilderFactory;
-import org.junit.Test;
+import org.junit.jupiter.api.Test;
 import org.mockito.Mockito;
 import org.xml.sax.ErrorHandler;
 import org.xml.sax.InputSource;
diff --git a/src/test/java/edu/harvard/iq/dataverse/export/SchemaDotOrgExporterTest.java b/src/test/java/edu/harvard/iq/dataverse/export/SchemaDotOrgExporterTest.java
index f10aad18496..7183f5bd497 100644
--- a/src/test/java/edu/harvard/iq/dataverse/export/SchemaDotOrgExporterTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/export/SchemaDotOrgExporterTest.java
@@ -3,7 +3,6 @@
 import edu.harvard.iq.dataverse.*;
 import edu.harvard.iq.dataverse.branding.BrandingUtilTest;
 import io.gdcc.spi.export.ExportDataProvider;
-import io.gdcc.spi.export.ExportException;
 import io.gdcc.spi.export.XMLExporter;
 import edu.harvard.iq.dataverse.license.License;
 import edu.harvard.iq.dataverse.license.LicenseServiceBean;
@@ -33,7 +32,7 @@
 import java.util.List;
 import java.util.Set;
 import java.util.logging.Logger;
-import javax.json.JsonObject;
+import jakarta.json.JsonObject;
 
 import edu.harvard.iq.dataverse.util.testing.JvmSetting;
 import org.junit.jupiter.api.BeforeAll;
@@ -41,9 +40,7 @@
 import org.junit.jupiter.api.Test;
 import org.mockito.Mockito;
 
-import static org.junit.Assert.assertFalse;
-import static org.junit.jupiter.api.Assertions.assertEquals;
-import static org.junit.jupiter.api.Assertions.assertTrue;
+import static org.junit.jupiter.api.Assertions.*;
 
 /**
  * For docs see {@link SchemaDotOrgExporter}.
diff --git a/src/test/java/edu/harvard/iq/dataverse/export/dublincore/DublinCoreExportUtilTest.java b/src/test/java/edu/harvard/iq/dataverse/export/dublincore/DublinCoreExportUtilTest.java
index 69c8083734c..4032f4649a4 100644
--- a/src/test/java/edu/harvard/iq/dataverse/export/dublincore/DublinCoreExportUtilTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/export/dublincore/DublinCoreExportUtilTest.java
@@ -13,9 +13,9 @@
 import java.nio.file.Files;
 import java.nio.file.Path;
 import java.util.logging.Logger;
-import javax.json.Json;
-import javax.json.JsonObject;
-import javax.json.JsonReader;
+import jakarta.json.Json;
+import jakarta.json.JsonObject;
+import jakarta.json.JsonReader;
 import org.junit.jupiter.api.Test;
 import org.xmlunit.assertj3.XmlAssert;
 
diff --git a/src/test/java/edu/harvard/iq/dataverse/externaltools/ExternalToolHandlerTest.java b/src/test/java/edu/harvard/iq/dataverse/externaltools/ExternalToolHandlerTest.java
index ab3a0263d66..39bf96210fc 100644
--- a/src/test/java/edu/harvard/iq/dataverse/externaltools/ExternalToolHandlerTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/externaltools/ExternalToolHandlerTest.java
@@ -12,8 +12,8 @@
 import edu.harvard.iq.dataverse.util.testing.JvmSetting;
 import org.junit.jupiter.api.Test;
 
-import javax.json.Json;
-import javax.json.JsonObject;
+import jakarta.json.Json;
+import jakarta.json.JsonObject;
 import static org.junit.jupiter.api.Assertions.assertEquals;
 import static org.junit.jupiter.api.Assertions.assertNotNull;
 import static org.junit.jupiter.api.Assertions.assertTrue;
diff --git a/src/test/java/edu/harvard/iq/dataverse/externaltools/ExternalToolServiceBeanTest.java b/src/test/java/edu/harvard/iq/dataverse/externaltools/ExternalToolServiceBeanTest.java
index 7b33cb8a19f..9337949f605 100644
--- a/src/test/java/edu/harvard/iq/dataverse/externaltools/ExternalToolServiceBeanTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/externaltools/ExternalToolServiceBeanTest.java
@@ -11,12 +11,11 @@
 import edu.harvard.iq.dataverse.authorization.users.ApiToken;
 import java.util.ArrayList;
 import java.util.List;
-import javax.json.Json;
-import javax.json.JsonObjectBuilder;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertNotNull;
-import static org.junit.Assert.assertNull;
-import org.junit.Test;
+import jakarta.json.Json;
+import jakarta.json.JsonObjectBuilder;
+
+import static org.junit.jupiter.api.Assertions.*;
+import org.junit.jupiter.api.Test;
 
 public class ExternalToolServiceBeanTest {
 
diff --git a/src/test/java/edu/harvard/iq/dataverse/externaltools/ExternalToolTest.java b/src/test/java/edu/harvard/iq/dataverse/externaltools/ExternalToolTest.java
index bbe029e77e1..ea8613b70bf 100644
--- a/src/test/java/edu/harvard/iq/dataverse/externaltools/ExternalToolTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/externaltools/ExternalToolTest.java
@@ -1,10 +1,11 @@
 package edu.harvard.iq.dataverse.externaltools;
 
-import javax.json.JsonObject;
-import static org.junit.Assert.assertEquals;
-import org.junit.Test;
+import jakarta.json.JsonObject;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import org.junit.jupiter.api.Test;
 
 import edu.harvard.iq.dataverse.DataFileServiceBean;
+
 import java.util.ArrayList;
 import java.util.List;
 
@@ -28,13 +29,13 @@ public void testToJson() {
         externalTool.setId(42l);
         JsonObject jsonObject = externalTool.toJson().build();
         System.out.println("result: " + jsonObject);
-        assertEquals("testToJson() with ExternalTool.DISPLAY_NAME", "myDisplayName", jsonObject.getString(ExternalTool.DISPLAY_NAME));
-        assertEquals("testToJson() with ExternalTool.TOOL_NAME", "explorer", jsonObject.getString(ExternalTool.TOOL_NAME));
-        assertEquals("testToJson() with ExternalTool.DESCRIPTION", "myDescription", jsonObject.getString(ExternalTool.DESCRIPTION));
-        assertEquals("testToJson() with ExternalTool.TYPES", "explore", jsonObject.getJsonArray(ExternalTool.TYPES).getString(0));
-        assertEquals("testToJson() with ExternalTool.TOOL_URL", "http://example.com", jsonObject.getString(ExternalTool.TOOL_URL));
-        assertEquals("testToJson() with ExternalTool.TOOL_PARAMETERS", "{}", jsonObject.getString(ExternalTool.TOOL_PARAMETERS));
-        assertEquals("testToJson() with ExternalTool.CONTENT_TYPE", DataFileServiceBean.MIME_TYPE_TSV_ALT, jsonObject.getString(ExternalTool.CONTENT_TYPE));
+        assertEquals("myDisplayName", jsonObject.getString(ExternalTool.DISPLAY_NAME), "testToJson() with ExternalTool.DISPLAY_NAME");
+        assertEquals("explorer", jsonObject.getString(ExternalTool.TOOL_NAME), "testToJson() with ExternalTool.TOOL_NAME");
+        assertEquals("myDescription", jsonObject.getString(ExternalTool.DESCRIPTION), "testToJson() with ExternalTool.DESCRIPTION");
+        assertEquals("explore", jsonObject.getJsonArray(ExternalTool.TYPES).getString(0), "testToJson() with ExternalTool.TYPES");
+        assertEquals("http://example.com", jsonObject.getString(ExternalTool.TOOL_URL), "testToJson() with ExternalTool.TOOL_URL");
+        assertEquals("{}", jsonObject.getString(ExternalTool.TOOL_PARAMETERS), "testToJson() with ExternalTool.TOOL_PARAMETERS");
+        assertEquals(DataFileServiceBean.MIME_TYPE_TSV_ALT, jsonObject.getString(ExternalTool.CONTENT_TYPE), "testToJson() with ExternalTool.CONTENT_TYPE");
     }
 
 }
diff --git a/src/test/java/edu/harvard/iq/dataverse/feedback/FeedbackUtilTest.java b/src/test/java/edu/harvard/iq/dataverse/feedback/FeedbackUtilTest.java
index 47e0f6da20e..7c31db5bee2 100644
--- a/src/test/java/edu/harvard/iq/dataverse/feedback/FeedbackUtilTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/feedback/FeedbackUtilTest.java
@@ -5,7 +5,6 @@
 import edu.harvard.iq.dataverse.DataFileCategory;
 import edu.harvard.iq.dataverse.DataFileTag;
 import edu.harvard.iq.dataverse.Dataset;
-import edu.harvard.iq.dataverse.DatasetFieldServiceBean;
 import edu.harvard.iq.dataverse.DatasetFieldType;
 import edu.harvard.iq.dataverse.DatasetVersion;
 import edu.harvard.iq.dataverse.Dataverse;
@@ -27,21 +26,18 @@
 import java.nio.file.Paths;
 import java.util.ArrayList;
 import java.util.Arrays;
-import java.util.Collections;
-import java.util.HashMap;
 import java.util.HashSet;
 import java.util.List;
-import java.util.Map;
 import java.util.Set;
-import javax.json.Json;
-import javax.json.JsonObject;
-import javax.json.JsonReader;
-import javax.mail.internet.AddressException;
-import javax.mail.internet.InternetAddress;
-import org.junit.Test;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertTrue;
-import org.junit.BeforeClass;
+import jakarta.json.Json;
+import jakarta.json.JsonObject;
+import jakarta.json.JsonReader;
+import jakarta.mail.internet.AddressException;
+import jakarta.mail.internet.InternetAddress;
+import org.junit.jupiter.api.Test;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertTrue;
+import org.junit.jupiter.api.BeforeAll;
 import org.mockito.Mockito;
 
 public class FeedbackUtilTest {
@@ -62,7 +58,7 @@ public class FeedbackUtilTest {
     private static final String systemEmail = "support@librascholar.edu";
     private static final boolean weKnowHowToCreateMockAuthenticatedUsers = false;
 
-    @BeforeClass
+    @BeforeAll
     public static void setUpClass() throws IOException, JsonParseException, AddressException {
 
         if (weKnowHowToCreateMockAuthenticatedUsers) {
diff --git a/src/test/java/edu/harvard/iq/dataverse/ingest/IngestFrequencyTest.java b/src/test/java/edu/harvard/iq/dataverse/ingest/IngestFrequencyTest.java
index cb0655c068f..96e314324ab 100644
--- a/src/test/java/edu/harvard/iq/dataverse/ingest/IngestFrequencyTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/ingest/IngestFrequencyTest.java
@@ -6,9 +6,9 @@
 import edu.harvard.iq.dataverse.ingest.tabulardata.TabularDataFileReader;
 import edu.harvard.iq.dataverse.ingest.tabulardata.TabularDataIngest;
 
-import org.junit.Test;
+import org.junit.jupiter.api.Test;
 
-import javax.ejb.EJB;
+import jakarta.ejb.EJB;
 import java.io.File;
 import java.io.FileInputStream;
 import java.io.FileNotFoundException;
@@ -16,8 +16,8 @@
 import java.io.BufferedInputStream;
 import java.util.Collection;
 
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertNotNull;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.*;
 
 public class IngestFrequencyTest {
 
diff --git a/src/test/java/edu/harvard/iq/dataverse/ingest/IngestUtilTest.java b/src/test/java/edu/harvard/iq/dataverse/ingest/IngestUtilTest.java
index ca68af4090c..4dfedf5aa17 100644
--- a/src/test/java/edu/harvard/iq/dataverse/ingest/IngestUtilTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/ingest/IngestUtilTest.java
@@ -17,13 +17,13 @@
 import java.util.Set;
 import java.util.logging.Level;
 import java.util.logging.Logger;
-import javax.json.Json;
-import javax.json.JsonArray;
-import javax.validation.ConstraintViolation;
+
+import jakarta.validation.ConstraintViolation;
 import org.dataverse.unf.UNFUtil;
 import org.dataverse.unf.UnfException;
-import org.junit.Test;
-import static org.junit.Assert.*;
+import org.junit.jupiter.api.Test;
+
+import static org.junit.jupiter.api.Assertions.*;
 
 public class IngestUtilTest {
 
diff --git a/src/test/java/edu/harvard/iq/dataverse/ingest/IngestableDataCheckerTest.java b/src/test/java/edu/harvard/iq/dataverse/ingest/IngestableDataCheckerTest.java
index ea9e378739b..11257f188fe 100644
--- a/src/test/java/edu/harvard/iq/dataverse/ingest/IngestableDataCheckerTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/ingest/IngestableDataCheckerTest.java
@@ -10,15 +10,18 @@
 import java.io.IOException;
 import java.nio.MappedByteBuffer;
 import java.nio.channels.FileChannel;
-import org.apache.commons.io.FileUtils;
-import org.junit.After;
-import org.junit.AfterClass;
-import org.junit.Before;
-import org.junit.BeforeClass;
-import org.junit.Test;
-import static org.junit.Assert.*;
-import org.junit.Rule;
-import org.junit.rules.TemporaryFolder;
+import java.nio.charset.Charset;
+import java.nio.charset.StandardCharsets;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.util.List;
+
+import org.junit.jupiter.api.AfterAll;
+import org.junit.jupiter.api.AfterEach;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.Test;
+import static org.junit.jupiter.api.Assertions.*;
 
 /**
  *
@@ -26,26 +29,23 @@
  */
 public class IngestableDataCheckerTest {
    
-    @Rule
-    public TemporaryFolder tempFolder = new TemporaryFolder();
-   
     public IngestableDataCheckerTest() {
     }
     
-    @BeforeClass
+    @BeforeAll
     public static void setUpClass() {
     }
     
-    @AfterClass
+    @AfterAll
     public static void tearDownClass() {
 
     }
     
-    @Before
+    @BeforeEach
     public void setUp() {     
     }
     
-    @After
+    @AfterEach
     public void tearDown() {
         
      
@@ -57,14 +57,14 @@ private File createTempFile(String filename, String fileContents) throws IOExcep
         if (filename == null){
             return null;
         }
-        File fh = this.tempFolder.newFile(filename);
-        fh.createNewFile();
+        
+        Path tmpFile = Files.createTempFile("ingestdatachecker", "");
         
         if (fileContents != null){
-            FileUtils.writeStringToFile(fh, fileContents);
+            Files.writeString(tmpFile, fileContents, StandardCharsets.UTF_8);
         }
         
-        return fh;
+        return tmpFile.toFile();
     }
     
     private MappedByteBuffer createTempFileAndGetBuffer(String filename, String fileContents) throws IOException {
diff --git a/src/test/java/edu/harvard/iq/dataverse/ingest/metadataextraction/impl/plugins/netcdf/NetcdfFileMetadataExtractorTest.java b/src/test/java/edu/harvard/iq/dataverse/ingest/metadataextraction/impl/plugins/netcdf/NetcdfFileMetadataExtractorTest.java
index 203fc96e70a..343d7f39cf5 100644
--- a/src/test/java/edu/harvard/iq/dataverse/ingest/metadataextraction/impl/plugins/netcdf/NetcdfFileMetadataExtractorTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/ingest/metadataextraction/impl/plugins/netcdf/NetcdfFileMetadataExtractorTest.java
@@ -5,8 +5,7 @@
 import java.io.File;
 import java.util.Map;
 import java.util.Set;
-import static org.junit.Assert.assertNull;
-import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.*;
 import org.junit.jupiter.api.Test;
 
 public class NetcdfFileMetadataExtractorTest {
diff --git a/src/test/java/edu/harvard/iq/dataverse/ingest/tabulardata/impl/plugins/csv/CSVFileReaderTest.java b/src/test/java/edu/harvard/iq/dataverse/ingest/tabulardata/impl/plugins/csv/CSVFileReaderTest.java
index cdc4249ba94..fc066ef195e 100644
--- a/src/test/java/edu/harvard/iq/dataverse/ingest/tabulardata/impl/plugins/csv/CSVFileReaderTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/ingest/tabulardata/impl/plugins/csv/CSVFileReaderTest.java
@@ -22,8 +22,9 @@
 import java.util.logging.Logger;
 import org.dataverse.unf.UNFUtil;
 import org.dataverse.unf.UnfException;
-import org.junit.Test;
-import static org.junit.Assert.*;
+import org.junit.jupiter.api.Test;
+
+import static org.junit.jupiter.api.Assertions.*;
 
 /**
  *
@@ -67,7 +68,7 @@ public void testRead() {
             } catch (IOException ex) {
                 fail();
             }
-            assertEquals("Error on line " + line, expLine, foundLine);
+            assertEquals(expLine, foundLine, "Error on line " + line);
             line++;
         }
 
@@ -121,15 +122,15 @@ public void testVariables() {
         // OK, let's go through the individual variables:
         for (int i = 0; i < result.getVarQuantity(); i++) {
 
-            assertEquals("variable " + i + ":", expectedVariableNames[i], result.getDataVariables().get(i).getName());
+            assertEquals(expectedVariableNames[i], result.getDataVariables().get(i).getName(), "variable " + i + ":");
 
-            assertEquals("variable " + i + ":", expectedVariableTypes[i], result.getDataVariables().get(i).getType());
+            assertEquals(expectedVariableTypes[i], result.getDataVariables().get(i).getType(), "variable " + i + ":");
 
-            assertEquals("variable " + i + ":", expectedVariableIntervals[i], result.getDataVariables().get(i).getInterval());
+            assertEquals(expectedVariableIntervals[i], result.getDataVariables().get(i).getInterval(), "variable " + i + ":");
 
-            assertEquals("variable " + i + ":", expectedVariableFormatCategories[i], result.getDataVariables().get(i).getFormatCategory());
+            assertEquals(expectedVariableFormatCategories[i], result.getDataVariables().get(i).getFormatCategory(), "variable " + i + ":");
 
-            assertEquals("variable " + i + ":", expectedVariableFormats[i], result.getDataVariables().get(i).getFormat());
+            assertEquals(expectedVariableFormats[i], result.getDataVariables().get(i).getFormat(), "variable " + i + ":");
         }
     }
 
@@ -196,7 +197,7 @@ public void testSubset() {
 
             Double[] columnVector = TabularSubsetGenerator.subsetDoubleVector(generatedTabInputStream, i, generatedDataTable.getCaseQuantity().intValue());
 
-            assertArrayEquals("column " + i + ":", floatVectors[vectorCount++], columnVector);
+            assertArrayEquals(floatVectors[vectorCount++], columnVector, "column " + i + ":");
         }
 
         // Discrete Numerics (aka, integers):
@@ -230,7 +231,7 @@ public void testSubset() {
 
             Long[] columnVector = TabularSubsetGenerator.subsetLongVector(generatedTabInputStream, i, generatedDataTable.getCaseQuantity().intValue());
 
-            assertArrayEquals("column " + i + ":", longVectors[vectorCount++], columnVector);
+            assertArrayEquals(longVectors[vectorCount++], columnVector, "column " + i + ":");
         }
 
         // And finally, Strings:
@@ -257,7 +258,7 @@ public void testSubset() {
 
             String[] columnVector = TabularSubsetGenerator.subsetStringVector(generatedTabInputStream, i, generatedDataTable.getCaseQuantity().intValue());
 
-            assertArrayEquals("column " + i + ":", stringVectors[vectorCount++], columnVector);
+            assertArrayEquals(stringVectors[vectorCount++], columnVector, "column " + i + ":");
         }
     }
 
@@ -387,7 +388,7 @@ public void testVariableUNFs() {
                 }
             }
 
-            assertEquals("Variable number " + i + ":", expectedUNFs[i], unf);
+            assertEquals(expectedUNFs[i], unf, "Variable number " + i + ":");
         }
 
     }
diff --git a/src/test/java/edu/harvard/iq/dataverse/ingest/tabulardata/impl/plugins/dta/DTAFileReaderTest.java b/src/test/java/edu/harvard/iq/dataverse/ingest/tabulardata/impl/plugins/dta/DTAFileReaderTest.java
index 2f8908c5920..113e9be6b54 100644
--- a/src/test/java/edu/harvard/iq/dataverse/ingest/tabulardata/impl/plugins/dta/DTAFileReaderTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/ingest/tabulardata/impl/plugins/dta/DTAFileReaderTest.java
@@ -5,8 +5,9 @@
 import java.io.File;
 import java.io.FileInputStream;
 import java.io.IOException;
-import org.junit.Test;
-import static org.junit.Assert.assertEquals;
+import org.junit.jupiter.api.Test;
+
+import static org.junit.jupiter.api.Assertions.assertEquals;
 
 public class DTAFileReaderTest {
 
diff --git a/src/test/java/edu/harvard/iq/dataverse/ingest/tabulardata/impl/plugins/dta/DataReaderTest.java b/src/test/java/edu/harvard/iq/dataverse/ingest/tabulardata/impl/plugins/dta/DataReaderTest.java
index 8ac84d9693a..a181f73c058 100644
--- a/src/test/java/edu/harvard/iq/dataverse/ingest/tabulardata/impl/plugins/dta/DataReaderTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/ingest/tabulardata/impl/plugins/dta/DataReaderTest.java
@@ -4,8 +4,8 @@
 import java.io.ByteArrayInputStream;
 import java.io.IOException;
 import java.nio.ByteBuffer;
-import org.junit.Test;
-import static org.junit.Assert.*;
+import org.junit.jupiter.api.Test;
+import static org.junit.jupiter.api.Assertions.*;
 
 /**
  * @author oscardssmith
@@ -39,12 +39,12 @@ public void testReadUShort() throws IOException {
     }
     
     // This should throw until we figure out what to do with uLongs that are large
-    @Test(expected = IOException.class)
-    public void testReadULong() throws IOException {
+    @Test
+    void testReadULong() throws IOException {
         byte[] bytes = {-1,-1,-1,-1,-1,-1,-1,-1,};
         BufferedInputStream stream = new BufferedInputStream(new ByteArrayInputStream(bytes));
         DataReader reader = new DataReader(stream);
         reader.setLSF(true);
-        assertEquals(-1, reader.readULong());
+        assertThrows(IOException.class, () -> reader.readULong());
     }
 }
diff --git a/src/test/java/edu/harvard/iq/dataverse/ingest/tabulardata/impl/plugins/dta/NewDTAFileReaderTest.java b/src/test/java/edu/harvard/iq/dataverse/ingest/tabulardata/impl/plugins/dta/NewDTAFileReaderTest.java
index 3c8c0a0d224..c963346b05e 100644
--- a/src/test/java/edu/harvard/iq/dataverse/ingest/tabulardata/impl/plugins/dta/NewDTAFileReaderTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/ingest/tabulardata/impl/plugins/dta/NewDTAFileReaderTest.java
@@ -4,19 +4,16 @@
 import edu.harvard.iq.dataverse.datavariable.DataVariable;
 import edu.harvard.iq.dataverse.datavariable.VariableCategory;
 import edu.harvard.iq.dataverse.ingest.tabulardata.TabularDataIngest;
-import edu.harvard.iq.dataverse.ingest.tabulardata.impl.plugins.dta.DataReader;
+
 import java.io.BufferedInputStream;
-import java.io.ByteArrayInputStream;
 import java.io.File;
 import java.io.FileInputStream;
 import java.io.IOException;
-import java.nio.ByteBuffer;
 import java.util.List;
 import org.apache.commons.io.FileUtils;
-import org.junit.Test;
-import static org.junit.Assert.*;
-import org.junit.Ignore;
-import org.junit.Assert;
+import org.junit.jupiter.api.Disabled;
+import org.junit.jupiter.api.Test;
+import static org.junit.jupiter.api.Assertions.*;
 
 public class NewDTAFileReaderTest {
     NewDTAFileReader instance;
@@ -51,7 +48,7 @@ public void testStrl() throws IOException {
         
         String[] vars = {"make","price","mpg","rep78","trunk","gear_ratio","strls"};
         String[] actualVars = table.getDataVariables().stream().map((var) -> var.getName()).toArray(String[]::new);
-        Assert.assertArrayEquals(vars, actualVars);
+        assertArrayEquals(vars, actualVars);
         String expected = "\"Buick LeSabre\"	5788	1.1111111111111111E21	100	32767	2.73	\"a\"\n" +
                           "\"Buick Opel\"	4453	26.0		10	2.87	\"bb\"\n" +
                           "\"Buick Regal\"	5189	20.0	3	16	2.93	\"ccc\"\n";
@@ -69,7 +66,7 @@ public void testDates() throws IOException {
         assertEquals(4, (long)table.getCaseQuantity());
         String[] vars = {"Clock","Daily","Weekly","Monthly","Quarterly","BiAnnually","Annually"};
         String[] actualVars = table.getDataVariables().stream().map((var) -> var.getName()).toArray(String[]::new);
-        Assert.assertArrayEquals(vars, actualVars);
+        assertArrayEquals(vars, actualVars);
         String expected = "2595-09-27 06:58:52.032	2018-06-20	2018-11-05	2018-06-01	2018-01-01	2018-01-01	2018\n" +
                           "2595-09-27 06:58:52.032	2018-06-20	2018-11-05	2018-06-01	2018-04-01	2018-01-01	2018\n" +
                           "2595-09-27 06:58:52.032	2018-06-20	2018-11-05	2018-06-01	2018-07-01	2018-07-01	2018\n" +
@@ -77,14 +74,14 @@ public void testDates() throws IOException {
         assertEquals(expected, FileUtils.readFileToString(result.getTabDelimitedFile()));
     }
     
-    @Test(expected = IOException.class)
-    public void testNull() throws IOException {
+    @Test
+    void testNull() {
         instance = new NewDTAFileReader(null, 117);
-        TabularDataIngest result = instance.read(null, new File(""));
+        assertThrows(IOException.class, () -> instance.read(null, new File("")));
     }
 
     // TODO: Can we create a small file to check into the code base that exercises the value-label names non-zero offset issue?
-    @Ignore
+    @Disabled
     @Test
     public void testFirstCategoryNonZeroOffset() throws IOException {
         instance = new NewDTAFileReader(null, 117);
@@ -105,7 +102,7 @@ public void testFirstCategoryNonZeroOffset() throws IOException {
     }
 
     // TODO: Can we create a small file to check into the code base that exercises the value-label names non-zero offset issue?
-    @Ignore
+    @Disabled
     @Test
     public void testFirstCategoryNonZeroOffset1() throws IOException {
         instance = new NewDTAFileReader(null, 118);
@@ -125,7 +122,7 @@ public void testFirstCategoryNonZeroOffset1() throws IOException {
     }
     
     // TODO: Is there a way to exersise this code with a smaller file? 33k.dta is 21MB.
-    @Ignore
+    @Disabled
     @Test
     public void test33k() throws IOException {
         instance = new NewDTAFileReader(null, 119);
@@ -135,7 +132,7 @@ public void test33k() throws IOException {
     
     // TODO: Can we create a small file to check into the code base that exercises the characteristics issue?
     // FIXME: testCharacteristics is passing in DTA117FileReaderTest but not here.
-    @Ignore
+    @Disabled
     @Test
     public void testCharacteristics() throws IOException {
         instance = new NewDTAFileReader(null, 117);
diff --git a/src/test/java/edu/harvard/iq/dataverse/locality/StorageSiteUtilTest.java b/src/test/java/edu/harvard/iq/dataverse/locality/StorageSiteUtilTest.java
index eb9562a2a69..b2f70ba2675 100644
--- a/src/test/java/edu/harvard/iq/dataverse/locality/StorageSiteUtilTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/locality/StorageSiteUtilTest.java
@@ -3,9 +3,12 @@
 import edu.harvard.iq.dataverse.util.json.JsonUtil;
 import java.util.ArrayList;
 import java.util.List;
-import javax.json.Json;
-import javax.json.JsonObjectBuilder;
-import org.junit.Test;
+import jakarta.json.Json;
+import jakarta.json.JsonObject;
+import jakarta.json.JsonObjectBuilder;
+import org.junit.jupiter.api.Test;
+
+import static org.junit.jupiter.api.Assertions.assertThrows;
 
 public class StorageSiteUtilTest {
 
@@ -22,44 +25,47 @@ public void testParse() throws Exception {
         System.out.println("output: " + output);
     }
 
-    @Test(expected = IllegalArgumentException.class)
-    public void testMissingHostname() throws Exception {
+    @Test
+    void testMissingHostname() {
         JsonObjectBuilder job = Json.createObjectBuilder();
         job.add(StorageSite.NAME, "myName");
         job.add(StorageSite.PRIMARY_STORAGE, true);
         job.add(StorageSite.TRANSFER_PROTOCOLS, "rsync");
-        StorageSiteUtil.parse(job.build());
+        JsonObject sut = job.build();
+        assertThrows(IllegalArgumentException.class, () -> StorageSiteUtil.parse(sut));
     }
 
-    @Test(expected = IllegalArgumentException.class)
-    public void testBadProtocol() throws Exception {
+    @Test
+    void testBadProtocol() {
         JsonObjectBuilder job = Json.createObjectBuilder();
         job.add(StorageSite.HOSTNAME, "myHostname");
         job.add(StorageSite.NAME, "myName");
         job.add(StorageSite.PRIMARY_STORAGE, true);
         job.add(StorageSite.TRANSFER_PROTOCOLS, "junk");
-        StorageSiteUtil.parse(job.build());
+        JsonObject sut = job.build();
+        assertThrows(IllegalArgumentException.class, () -> StorageSiteUtil.parse(sut));
     }
 
-    @Test(expected = IllegalArgumentException.class)
-    public void testNonBoolean() throws Exception {
+    @Test
+    void testNonBoolean() {
         JsonObjectBuilder job = Json.createObjectBuilder();
         job.add(StorageSite.HOSTNAME, "myHostname");
         job.add(StorageSite.NAME, "myName");
         job.add(StorageSite.PRIMARY_STORAGE, "not a boolean");
         job.add(StorageSite.TRANSFER_PROTOCOLS, "rsync");
-        StorageSiteUtil.parse(job.build());
+        JsonObject sut = job.build();
+        assertThrows(IllegalArgumentException.class, () -> StorageSiteUtil.parse(sut));
     }
 
-    @Test(expected = Exception.class)
-    public void testSecondPrimaryNotAllowed() throws Exception {
+    @Test
+    void testSecondPrimaryNotAllowed() {
         StorageSite newStorageSite = new StorageSite();
         newStorageSite.setPrimaryStorage(true);
         List<StorageSite> exitingSites = new ArrayList<>();
         StorageSite existingSite1 = new StorageSite();
         existingSite1.setPrimaryStorage(true);
         exitingSites.add(existingSite1);
-        StorageSiteUtil.ensureOnlyOnePrimary(newStorageSite, exitingSites);
+        assertThrows(Exception.class, () -> StorageSiteUtil.ensureOnlyOnePrimary(newStorageSite, exitingSites));
     }
 
     @Test
diff --git a/src/test/java/edu/harvard/iq/dataverse/makedatacount/DatasetMetricsServiceBeanTest.java b/src/test/java/edu/harvard/iq/dataverse/makedatacount/DatasetMetricsServiceBeanTest.java
index 6fa9ff1a8e9..61be14f41aa 100644
--- a/src/test/java/edu/harvard/iq/dataverse/makedatacount/DatasetMetricsServiceBeanTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/makedatacount/DatasetMetricsServiceBeanTest.java
@@ -3,11 +3,11 @@
 import edu.harvard.iq.dataverse.Dataset;
 import org.junit.jupiter.api.BeforeEach;
 import org.junit.jupiter.api.Test;
-import org.mockito.Matchers;
 
-import javax.ejb.EJBException;
-import javax.persistence.EntityManager;
-import javax.persistence.Query;
+import jakarta.ejb.EJBException;
+import jakarta.persistence.EntityManager;
+import jakarta.persistence.Query;
+import org.mockito.ArgumentMatchers;
 
 import java.util.ArrayList;
 import java.util.Arrays;
@@ -36,7 +36,7 @@ void setup() {
     @Test
     void testGetDatasetMetricsByDatasetMonthCountry_withoutResults() {
         when(query.getResultList()).thenReturn(new ArrayList());
-        when(this.serviceBean.em.createQuery(Matchers.anyString())).thenReturn(query);
+        when(this.serviceBean.em.createQuery(ArgumentMatchers.anyString())).thenReturn(query);
 
         assertNull(serviceBean.getDatasetMetricsByDatasetMonthCountry(dataset, "01-01", "CH"));
     }
@@ -44,7 +44,7 @@ void testGetDatasetMetricsByDatasetMonthCountry_withoutResults() {
     @Test
     void testGetDatasetMetricsByDatasetMonthCountry_throwsForMultipleResults() {
         when(query.getResultList()).thenReturn(Arrays.asList(1, 2));
-        when(this.serviceBean.em.createQuery(Matchers.anyString())).thenReturn(query);
+        when(this.serviceBean.em.createQuery(ArgumentMatchers.anyString())).thenReturn(query);
 
         assertThrows(EJBException.class, () -> {
             serviceBean.getDatasetMetricsByDatasetMonthCountry(dataset, "01-01", "CH");
@@ -65,7 +65,7 @@ void testGetDatasetMetricsByDatasetMonthCountry_aggregatesForSingleResult() {
         datasetMetrics.setDownloadsUniqueMachine(8L);
 
         when(query.getResultList()).thenReturn(Arrays.asList(datasetMetrics));
-        when(this.serviceBean.em.createQuery(Matchers.anyString())).thenReturn(query);
+        when(this.serviceBean.em.createQuery(ArgumentMatchers.anyString())).thenReturn(query);
 
         DatasetMetrics result = serviceBean.getDatasetMetricsByDatasetMonthCountry(dataset, "04.2019", "CH");
 
diff --git a/src/test/java/edu/harvard/iq/dataverse/makedatacount/MakeDataCountLoggingServiceBeanTest.java b/src/test/java/edu/harvard/iq/dataverse/makedatacount/MakeDataCountLoggingServiceBeanTest.java
index a630faf9247..c1051a57db8 100644
--- a/src/test/java/edu/harvard/iq/dataverse/makedatacount/MakeDataCountLoggingServiceBeanTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/makedatacount/MakeDataCountLoggingServiceBeanTest.java
@@ -16,11 +16,13 @@
 import edu.harvard.iq.dataverse.makedatacount.MakeDataCountLoggingServiceBean.MakeDataCountEntry;
 import edu.harvard.iq.dataverse.mocks.MocksFactory;
 import java.util.Date;
-import javax.faces.context.FacesContext;
+
 import static org.hamcrest.CoreMatchers.is;
 import static org.hamcrest.CoreMatchers.not;
-import static org.junit.Assert.assertThat;
-import org.junit.Test;
+import static org.hamcrest.MatcherAssert.assertThat;
+
+import org.hamcrest.MatcherAssert;
+import org.junit.jupiter.api.Test;
 
 /**
  *
diff --git a/src/test/java/edu/harvard/iq/dataverse/makedatacount/MakeDataCountUtilTest.java b/src/test/java/edu/harvard/iq/dataverse/makedatacount/MakeDataCountUtilTest.java
index 4e034f0d314..56e786714b6 100644
--- a/src/test/java/edu/harvard/iq/dataverse/makedatacount/MakeDataCountUtilTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/makedatacount/MakeDataCountUtilTest.java
@@ -4,10 +4,11 @@
 import java.io.FileReader;
 import java.io.IOException;
 import java.util.List;
-import javax.json.Json;
-import javax.json.JsonObject;
-import org.junit.Assert;
-import org.junit.Test;
+import jakarta.json.Json;
+import jakarta.json.JsonObject;
+import org.junit.jupiter.api.Test;
+
+import static org.junit.jupiter.api.Assertions.assertEquals;
 
 public class MakeDataCountUtilTest {
 
@@ -30,7 +31,7 @@ public void testParseCitations() {
         try (FileReader reader = new FileReader("src/test/java/edu/harvard/iq/dataverse/makedatacount/citations-for-doi-10.7910-DVN-HQZOOB.json")) {
             report = Json.createReader(reader).readObject();
             List<DatasetExternalCitations> datasetExternalCitations = MakeDataCountUtil.parseCitations(report);
-            Assert.assertEquals(2, datasetExternalCitations.size());
+            assertEquals(2, datasetExternalCitations.size());
         } catch (FileNotFoundException ex) {
             System.out.print("File not found: " + ex.getMessage());
         } catch (IOException ex) {
diff --git a/src/test/java/edu/harvard/iq/dataverse/metrics/MetricsUtilTest.java b/src/test/java/edu/harvard/iq/dataverse/metrics/MetricsUtilTest.java
index 9aa4c9c6723..484ce2ebe47 100644
--- a/src/test/java/edu/harvard/iq/dataverse/metrics/MetricsUtilTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/metrics/MetricsUtilTest.java
@@ -8,21 +8,19 @@
 import java.util.List;
 import java.util.Arrays;
 import java.util.Collection;
-import javax.json.Json;
-import javax.json.JsonArray;
-import javax.json.JsonArrayBuilder;
-import javax.json.JsonObject;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.fail;
-
-import org.junit.Test;
-import org.junit.experimental.runners.Enclosed;
-import org.junit.runner.RunWith;
-import org.junit.runners.Parameterized;
-import org.junit.runners.Parameterized.Parameter;
-import org.junit.runners.Parameterized.Parameters;
-
-@RunWith(Enclosed.class)
+import jakarta.json.Json;
+import jakarta.json.JsonArray;
+import jakarta.json.JsonArrayBuilder;
+import jakarta.json.JsonObject;
+
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.params.ParameterizedTest;
+import org.junit.jupiter.params.provider.CsvSource;
+
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertThrows;
+import static org.junit.jupiter.api.Assertions.fail;
+
 public class MetricsUtilTest {
 
     public static class MetricsUtilNoParamTest {
@@ -146,23 +144,23 @@ public void testDataversesBySubjectToJson() {
         }
 
         @Test
-        public void testSanitizeHappyPath() throws Exception {
+        void testSanitizeHappyPath() {
             assertEquals("2018-04", MetricsUtil.sanitizeYearMonthUserInput("2018-04"));
         }
 
-        @Test(expected = Exception.class)
-        public void testSanitizeJunk() throws Exception {
-            MetricsUtil.sanitizeYearMonthUserInput("junk");
+        @Test
+        void testSanitizeJunk() {
+            assertThrows(Exception.class, () -> MetricsUtil.sanitizeYearMonthUserInput("junk"));
         }
 
-        @Test(expected = Exception.class)
-        public void testSanitizeFullIso() throws Exception {
-            MetricsUtil.sanitizeYearMonthUserInput("2018-01-01");
+        @Test
+        void testSanitizeFullIso() {
+            assertThrows(Exception.class, () -> MetricsUtil.sanitizeYearMonthUserInput("2018-01-01"));
         }
 
-        @Test(expected = Exception.class)
-        public void testSanitizeYearMonthUserInputIsAfterCurrentDate() throws Exception {
-            MetricsUtil.sanitizeYearMonthUserInput("2099-01");
+        @Test
+        void testSanitizeYearMonthUserInputIsAfterCurrentDate() {
+            assertThrows(Exception.class, () -> MetricsUtil.sanitizeYearMonthUserInput("2099-01"));
         }
 
         @Test
@@ -207,42 +205,20 @@ public void testStringToJsonObjectBuilder() {
         }
 
     }
-
-    @RunWith(Parameterized.class)
-    public static class ValidateDataLocationStringTypeTest {
-        @Parameter
-        public String dataLocation;
-
-        @Parameter(1)
-        public boolean isExceptionExpected;
-
-        @Parameter(2)
-        public String expectedOutput;
-
-        @Parameters
-        public static Collection<Object[]> parameters() {
-            return Arrays.asList(new Object[][] { 
-                { "local", false, "local" }, 
-                { "remote", false, "remote" },
-                { "all", false, "all" }, 
-                { null, false, "local" }, 
-                { "", false, "local" },
-                { "abcd", true, null } 
-            });
-        }
-
-        @Test
-        public void testValidateDataLocationStringType() {
-            try {
-                assertEquals(expectedOutput, MetricsUtil.validateDataLocationStringType(dataLocation));
-            } catch (Exception e) {
-                if (isExceptionExpected) {
-                    return;
-                } else {
-                    fail("should not throw an exception!");
-                }
-            }
-        }
-
+    
+    @ParameterizedTest
+    @CsvSource(value = {
+        "local,false,local",
+        "remote,false,remote",
+        "all,false,all",
+        "NULL,false,local",
+        "'',false,local",
+        "abcd,true,NULL"
+    }, nullValues = "NULL")
+    void testValidateDataLocationStringType(String dataLocation, boolean isExceptionExpected, String expectedOutput) {
+        if (isExceptionExpected)
+            assertThrows(Exception.class, () -> MetricsUtil.validateDataLocationStringType(dataLocation));
+        else
+            assertEquals(expectedOutput, MetricsUtil.validateDataLocationStringType(dataLocation));
     }
 }
diff --git a/src/test/java/edu/harvard/iq/dataverse/mocks/MockDatasetFieldSvc.java b/src/test/java/edu/harvard/iq/dataverse/mocks/MockDatasetFieldSvc.java
index a8177537d5f..22936def497 100644
--- a/src/test/java/edu/harvard/iq/dataverse/mocks/MockDatasetFieldSvc.java
+++ b/src/test/java/edu/harvard/iq/dataverse/mocks/MockDatasetFieldSvc.java
@@ -3,7 +3,7 @@
 import java.util.HashMap;
 import java.util.Map;
 
-import javax.json.JsonObject;
+import jakarta.json.JsonObject;
 
 import edu.harvard.iq.dataverse.ControlledVocabularyValue;
 import edu.harvard.iq.dataverse.DatasetFieldServiceBean;
diff --git a/src/test/java/edu/harvard/iq/dataverse/mocks/MocksFactory.java b/src/test/java/edu/harvard/iq/dataverse/mocks/MocksFactory.java
index 7d6e663a547..927d288d660 100644
--- a/src/test/java/edu/harvard/iq/dataverse/mocks/MocksFactory.java
+++ b/src/test/java/edu/harvard/iq/dataverse/mocks/MocksFactory.java
@@ -143,6 +143,7 @@ public static Dataset makeDataset() {
         Dataset ds = new Dataset();
         ds.setId( nextId() );
         ds.setIdentifier("sample-ds-" + ds.getId() );
+        ds.setAuthority("10.5072");
         ds.setCategoriesByName( Arrays.asList("CatOne", "CatTwo", "CatThree") );
         final List<DataFile> files = makeFiles(10);
         final List<FileMetadata> metadatas = new ArrayList<>(10);
diff --git a/src/test/java/edu/harvard/iq/dataverse/mydata/MyDataUtilTest.java b/src/test/java/edu/harvard/iq/dataverse/mydata/MyDataUtilTest.java
index 69996ce71fe..8cf5b0a3f44 100644
--- a/src/test/java/edu/harvard/iq/dataverse/mydata/MyDataUtilTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/mydata/MyDataUtilTest.java
@@ -1,88 +1,82 @@
 package edu.harvard.iq.dataverse.mydata;
 
-import static org.junit.Assert.assertTrue;
-import static org.junit.Assume.assumeTrue;
+import org.junit.jupiter.params.ParameterizedTest;
+import org.junit.jupiter.params.provider.EmptySource;
+import org.junit.jupiter.params.provider.MethodSource;
+import org.junit.jupiter.params.provider.NullSource;
 
-import org.junit.experimental.theories.DataPoints;
-import org.junit.experimental.theories.Theories;
-import org.junit.experimental.theories.Theory;
-import org.junit.runner.RunWith;
+import java.util.List;
 
-/**
- * Theories allows to add more formal tests to our code. In a way JUnit Theories behave 
- * much like mathematical theories that hold for every element of a large (infinite) set. 
- * JUnit will combine every possible combination (cartesian product) of datapoints and 
- * pass these to the tests annotated with @Theory. The assume statements make sure, only 
- * valid datapoints are tested in each Theory.
- * 
- * @Datapoints - defines an array of values to test on
- * @Datapoint - stores one single value
- * 
- * JUnit will no longer maintain a JUnit 4 Theories equivalent in the JUnit 5 codebase, as 
- * mentioned in a discussion here: https://github.com/junit-team/junit5/pull/1422#issuecomment-389644868
- */
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertNull;
+import static org.junit.jupiter.api.Assumptions.assumeTrue;
 
-@RunWith(Theories.class)
-public class MyDataUtilTest {
+class MyDataUtilTest {
 
-    @DataPoints
-    public static String[] userIdentifier = { 
-        "@nzaugg", "nzaugg@", "nzaugg", "123nzaugg", "", " ", null, "@",  "n" };
+    static List<String> userIdentifier() {
+        return List.of("@nzaugg", "nzaugg@", "nzaugg", "123nzaugg", " ", "@", "n");
+    }
 
-    @Theory
-    public void testFormatUserIdentifierAsAssigneeIdentifierNull(String userIdentifier) {
-        assumeTrue(userIdentifier == null);
+    @ParameterizedTest
+    @NullSource
+    void testFormatUserIdentifierAsAssigneeIdentifierNull(String userIdentifier) {
         String formattedUserIdentifier = MyDataUtil.formatUserIdentifierAsAssigneeIdentifier(userIdentifier);
-        assertTrue(formattedUserIdentifier ==  null);
+        assertNull(formattedUserIdentifier);
     }
-
-    @Theory
-    public void testFormatUserIdentifierAsAssigneeIdentifierOneCharString(String userIdentifier) {
-        assumeTrue(userIdentifier != null);
+    
+    @ParameterizedTest
+    @MethodSource("userIdentifier")
+    void testFormatUserIdentifierAsAssigneeIdentifierOneCharString(String userIdentifier) {
         assumeTrue(userIdentifier.startsWith("@"));
+        
         String formattedUserIdentifier = MyDataUtil.formatUserIdentifierAsAssigneeIdentifier(userIdentifier);
-        assertTrue(formattedUserIdentifier.equals(userIdentifier));
+        assertEquals(userIdentifier, formattedUserIdentifier);
     }
-
-    @Theory
-    public void testFormatUserIdentifierAsAssigneeIdentifier(String userIdentifier) {
-        assumeTrue(userIdentifier != null);
+    
+    @ParameterizedTest
+    @MethodSource("userIdentifier")
+    void testFormatUserIdentifierAsAssigneeIdentifier(String userIdentifier) {
         assumeTrue(!userIdentifier.startsWith("@"));
+        
         String formattedUserIdentifier = MyDataUtil.formatUserIdentifierAsAssigneeIdentifier(userIdentifier);
-        assertTrue(formattedUserIdentifier.equals("@" + userIdentifier));
+        assertEquals("@" + userIdentifier, formattedUserIdentifier);
     }
-
-    @Theory
-    public void testFormatUserIdentifierForMyDataFormNull(String userIdentifier) {
-        assumeTrue(userIdentifier == null);
+    
+    @ParameterizedTest
+    @NullSource
+    void testFormatUserIdentifierForMyDataFormNull(String userIdentifier) {
         String formattedUserIdentifier = MyDataUtil.formatUserIdentifierForMyDataForm(userIdentifier);
-        assertTrue(formattedUserIdentifier ==  null);
+        assertNull(formattedUserIdentifier);
     }
-
-    @Theory
-    public void testFormatUserIdentifierForMyDataFormOneCharString(String userIdentifier) {
-        assumeTrue(userIdentifier != null);
+    
+    @ParameterizedTest
+    @MethodSource("userIdentifier")
+    void testFormatUserIdentifierForMyDataFormOneCharString(String userIdentifier) {
         assumeTrue(userIdentifier.startsWith("@"));
         assumeTrue(userIdentifier.length() == 1);
+        
         String formattedUserIdentifier = MyDataUtil.formatUserIdentifierForMyDataForm(userIdentifier);
-        assertTrue(formattedUserIdentifier ==  null);
+        assertNull(formattedUserIdentifier);
     }
-
-    @Theory
-    public void testFormatUserIdentifierForMyDataFormLongerString(String userIdentifier) {
-        assumeTrue(userIdentifier != null);
+    
+    @ParameterizedTest
+    @MethodSource("userIdentifier")
+    void testFormatUserIdentifierForMyDataFormLongerString(String userIdentifier) {
         assumeTrue(userIdentifier.startsWith("@"));
         assumeTrue(userIdentifier.length() > 1);
+        
         String formattedUserIdentifier = MyDataUtil.formatUserIdentifierForMyDataForm(userIdentifier);
-        assertTrue(formattedUserIdentifier.equals(userIdentifier.substring(1)));
+        assertEquals(userIdentifier.substring(1), formattedUserIdentifier);
     }
-
-    @Theory
-    public void testFormatUserIdentifierForMyDataForm(String userIdentifier) {
-        assumeTrue(userIdentifier != null);
+    
+    @ParameterizedTest
+    @MethodSource("userIdentifier")
+    @EmptySource
+    void testFormatUserIdentifierForMyDataForm(String userIdentifier) {
         assumeTrue(!userIdentifier.startsWith("@"));
+        
         String formattedUserIdentifier = MyDataUtil.formatUserIdentifierForMyDataForm(userIdentifier);
-        assertTrue(formattedUserIdentifier.equals(userIdentifier));
+        assertEquals(userIdentifier, formattedUserIdentifier);
     }
 
 }
\ No newline at end of file
diff --git a/src/test/java/edu/harvard/iq/dataverse/mydata/SolrQueryFormatterTest.java b/src/test/java/edu/harvard/iq/dataverse/mydata/SolrQueryFormatterTest.java
index c15bc280316..789204c1db4 100644
--- a/src/test/java/edu/harvard/iq/dataverse/mydata/SolrQueryFormatterTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/mydata/SolrQueryFormatterTest.java
@@ -5,8 +5,9 @@
  */
 package edu.harvard.iq.dataverse.mydata;
 
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.fail;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertThrows;
+import static org.junit.jupiter.api.Assertions.fail;
 
 import java.lang.NullPointerException;
 import java.util.ArrayList;
@@ -16,20 +17,15 @@
 import java.util.List;
 import java.util.Random;
 import java.util.Set;
+import java.util.stream.Stream;
 
 import org.apache.commons.lang3.StringUtils;
-import org.junit.Test;
-import org.junit.experimental.runners.Enclosed;
-import org.junit.runner.RunWith;
-import org.junit.runners.Parameterized;
-import org.junit.runners.Parameterized.Parameter;
-import org.junit.runners.Parameterized.Parameters;
-
-/**
- *
- * @author rmp553
- */
-@RunWith(Enclosed.class)
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.params.ParameterizedTest;
+import org.junit.jupiter.params.provider.Arguments;
+import org.junit.jupiter.params.provider.MethodSource;
+
+
 public class SolrQueryFormatterTest {
 
     public static class SolrQueryFormatterNoParamTest {
@@ -130,28 +126,7 @@ private void msgt(String s){
         }
     }
 
-    @RunWith(Parameterized.class)
-    public static class SolrQueryFormatterParamTest {
-
-        @Parameter(0)
-        public List<Long> sliceOfIds;
-
-        @Parameter(1)
-        public String paramName;
-
-        @Parameter(2)
-        public String dvObjectType;
-
-        // may be either
-        //  (i) the expected query part or
-        // (ii) the expected exception message
-        @Parameter(3)
-        public String expectedResult;
-
-        @Parameter(4)
-        public Class expectedException;
-
-        @Parameters
+    /*
         public static Collection data() {
             // The following list of test cases was compiled using the interface-based approach for input-space partition.
             // Therefor, for every input parameter, the domain of possible values was partitioned into different sets:
@@ -212,24 +187,90 @@ public static Collection data() {
                 { new ArrayList<Long>(Arrays.asList(1L, null)), "paramName", "dvObjectType", "(paramName:(1) AND dvObjectType:(dvObjectType))", null },
             });
         }
-
-        @Test
-        public void testFormatIdsForSolrClause() {
-            SolrQueryFormatter sqf = new SolrQueryFormatter();
-
-            if (expectedException == null) {
-                assertEquals(expectedResult, sqf.formatIdsForSolrClause(sliceOfIds, paramName, dvObjectType));
-                return;
-            }
-
-            try {
-                sqf.formatIdsForSolrClause(sliceOfIds, paramName, dvObjectType);
-                fail("Expected exception (" + expectedException.toString() + ") was not thrown");
-            } catch (Exception ex) {
-                assertEquals("verify the exception class", expectedException, ex.getClass());
-                assertEquals("verify the exception message", expectedResult, ex.getMessage());
-            }
+     */
+    
+    /*
+     * The following list of test cases was compiled using the interface-based approach for input-space partition.
+     * Therefore, for every input parameter, the domain of possible values was partitioned into different sets:
+     *    - sliceOfIds   (5 sets): null, empty, non-empty with null values only, non-empty with Long values only, non-empty with both null and Long values
+     *    - paramName    (3 sets): null, empty, non-empty
+     *    - dvObjectType (3 sets): null, empty, non-empty
+     * Then, for every set, a representative value was chosen and combined with every other set (3*3*5 = 45 test cases).
+     */
+    static Stream<Arguments> data() {
+        return Stream.of(
+            // sliceOfIds                   paramName    dvObjectType    expectedResult                                     expectedException
+            Arguments.of(null,              null,        null,           "paramName cannot be null",                        NullPointerException.class),
+            Arguments.of(null,              null,        "",             "paramName cannot be null",                        NullPointerException.class),
+            Arguments.of(null,              null,        "dvObjectType", "paramName cannot be null",                        NullPointerException.class),
+            Arguments.of(null,              "",          null,           "sliceOfIds cannot be null",                       NullPointerException.class),
+            Arguments.of(null,              "",          "",             "sliceOfIds cannot be null",                       NullPointerException.class),
+            Arguments.of(null,              "",          "dvObjectType", "sliceOfIds cannot be null",                       NullPointerException.class),
+            Arguments.of(null,              "paramName", null,           "sliceOfIds cannot be null",                       NullPointerException.class),
+            Arguments.of(null,              "paramName", "",             "sliceOfIds cannot be null",                       NullPointerException.class),
+            Arguments.of(null,              "paramName", "dvObjectType", "sliceOfIds cannot be null",                       NullPointerException.class),
+            
+            Arguments.of(list(),            null,        null,           "paramName cannot be null",                        NullPointerException.class),
+            Arguments.of(list(),            null,        "",             "paramName cannot be null",                        NullPointerException.class),
+            Arguments.of(list(),            null,        "dvObjectType", "paramName cannot be null",                        NullPointerException.class),
+            Arguments.of(list(),            "",          null,           "sliceOfIds must have at least 1 value",           IllegalStateException.class),
+            Arguments.of(list(),            "",          "",             "sliceOfIds must have at least 1 value",           IllegalStateException.class),
+            Arguments.of(list(),            "",          "dvObjectType", "sliceOfIds must have at least 1 value",           IllegalStateException.class),
+            Arguments.of(list(),            "paramName", null,           "sliceOfIds must have at least 1 value",           IllegalStateException.class),
+            Arguments.of(list(),            "paramName", "",             "sliceOfIds must have at least 1 value",           IllegalStateException.class),
+            Arguments.of(list(),            "paramName", "dvObjectType", "sliceOfIds must have at least 1 value",           IllegalStateException.class),
+            
+            Arguments.of(list((Long) null), null,        null,           "paramName cannot be null",                        NullPointerException.class),
+            Arguments.of(list((Long) null), null,        "",             "paramName cannot be null",                        NullPointerException.class),
+            Arguments.of(list((Long) null), null,        "dvObjectType", "paramName cannot be null",                        NullPointerException.class),
+            Arguments.of(list((Long) null), "",          null,           "(:())",                                           null),
+            Arguments.of(list((Long) null), "",          "",             "(:() AND dvObjectType:())",                       null),
+            Arguments.of(list((Long) null), "",          "dvObjectType", "(:() AND dvObjectType:(dvObjectType))",           null),
+            Arguments.of(list((Long) null), "paramName", null,           "(paramName:())",                                  null),
+            Arguments.of(list((Long) null), "paramName", "",             "(paramName:() AND dvObjectType:())",              null),
+            Arguments.of(list((Long) null), "paramName", "dvObjectType", "(paramName:() AND dvObjectType:(dvObjectType))",  null),
+            
+            Arguments.of(list(1L),          null,        null,           "paramName cannot be null",                        NullPointerException.class),
+            Arguments.of(list(1L),          null,        "",             "paramName cannot be null",                        NullPointerException.class),
+            Arguments.of(list(1L),          null,        "dvObjectType", "paramName cannot be null",                        NullPointerException.class),
+            Arguments.of(list(1L),          "",          null,           "(:(1))",                                          null),
+            Arguments.of(list(1L),          "",          "",             "(:(1) AND dvObjectType:())",                      null),
+            Arguments.of(list(1L),          "",          "dvObjectType", "(:(1) AND dvObjectType:(dvObjectType))",          null),
+            Arguments.of(list(1L),          "paramName", null,           "(paramName:(1))",                                 null),
+            Arguments.of(list(1L),          "paramName", "",             "(paramName:(1) AND dvObjectType:())",             null),
+            Arguments.of(list(1L),          "paramName", "dvObjectType", "(paramName:(1) AND dvObjectType:(dvObjectType))", null),
+            
+            Arguments.of(list(1L, null),    null,        null,           "paramName cannot be null",                        NullPointerException.class),
+            Arguments.of(list(1L, null),    null,        "",             "paramName cannot be null",                        NullPointerException.class),
+            Arguments.of(list(1L, null),    null,        "dvObjectType", "paramName cannot be null",                        NullPointerException.class),
+            Arguments.of(list(1L, null),    "",          null,           "(:(1))",                                          null),
+            Arguments.of(list(1L, null),    "",          "",             "(:(1) AND dvObjectType:())",                      null),
+            Arguments.of(list(1L, null),    "",          "dvObjectType", "(:(1) AND dvObjectType:(dvObjectType))",          null),
+            Arguments.of(list(1L, null),    "paramName", null,           "(paramName:(1))",                                 null),
+            Arguments.of(list(1L, null),    "paramName", "",             "(paramName:(1) AND dvObjectType:())",             null),
+            Arguments.of(list(1L, null),    "paramName", "dvObjectType", "(paramName:(1) AND dvObjectType:(dvObjectType))", null)
+        );
+    }
+    
+    /**
+     * @param expectedResult May either be (i) the expected query part or (ii) the expected exception message
+     */
+    @ParameterizedTest
+    @MethodSource("data")
+    void testFormatIdsForSolrClause(List<Long> sliceOfIds, String paramName, String dvObjectType,
+                                    String expectedResult, Class<Throwable> expectedException) {
+        SolrQueryFormatter sqf = new SolrQueryFormatter();
+        
+        if (expectedException == null) {
+            assertEquals(expectedResult, sqf.formatIdsForSolrClause(sliceOfIds, paramName, dvObjectType));
+            return;
         }
-
+        
+        Throwable e = assertThrows(expectedException, () -> sqf.formatIdsForSolrClause(sliceOfIds, paramName, dvObjectType));
+        assertEquals(expectedResult, e.getMessage());
+    }
+    
+    static List<Long> list(Long... args) {
+        return Arrays.asList(args);
     }
 }
diff --git a/src/test/java/edu/harvard/iq/dataverse/passwordreset/PasswordResetDataTest.java b/src/test/java/edu/harvard/iq/dataverse/passwordreset/PasswordResetDataTest.java
index 9d5b5e0e70e..d7831003142 100644
--- a/src/test/java/edu/harvard/iq/dataverse/passwordreset/PasswordResetDataTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/passwordreset/PasswordResetDataTest.java
@@ -5,31 +5,32 @@
  */
 package edu.harvard.iq.dataverse.passwordreset;
 
-import org.junit.After;
-import org.junit.AfterClass;
-import org.junit.Before;
-import org.junit.BeforeClass;
-import org.junit.Test;
-import static org.junit.Assert.*;
+import org.junit.jupiter.api.AfterEach;
+import org.junit.jupiter.api.AfterAll;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.Test;
+
+import static org.junit.jupiter.api.Assertions.*;
 
 public class PasswordResetDataTest {
 
     public PasswordResetDataTest() {
     }
 
-    @BeforeClass
+    @BeforeAll
     public static void setUpClass() {
     }
 
-    @AfterClass
+    @AfterAll
     public static void tearDownClass() {
     }
 
-    @Before
+    @BeforeEach
     public void setUp() {
     }
 
-    @After
+    @AfterEach
     public void tearDown() {
     }
 
diff --git a/src/test/java/edu/harvard/iq/dataverse/passwordreset/PasswordResetServiceBeanTest.java b/src/test/java/edu/harvard/iq/dataverse/passwordreset/PasswordResetServiceBeanTest.java
index d7da02e4459..4fbd2352d09 100644
--- a/src/test/java/edu/harvard/iq/dataverse/passwordreset/PasswordResetServiceBeanTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/passwordreset/PasswordResetServiceBeanTest.java
@@ -10,10 +10,9 @@
 import org.junit.jupiter.api.BeforeEach;
 import org.junit.jupiter.api.Test;
 import org.mockito.ArgumentMatchers;
-import org.mockito.Matchers;
 
-import javax.persistence.EntityManager;
-import javax.persistence.TypedQuery;
+import jakarta.persistence.EntityManager;
+import jakarta.persistence.TypedQuery;
 import java.util.Arrays;
 import java.util.List;
 
@@ -97,7 +96,7 @@ void testAttemptPasswordReset_withNullNewPassword() {
 
     @Test
     void testAttemptPasswordReset_withValidationErrors() {
-        when(mockedPasswordValidatorServiceBean.validate(Matchers.anyString())).thenReturn(Arrays.asList("error"));
+        when(mockedPasswordValidatorServiceBean.validate(ArgumentMatchers.anyString())).thenReturn(Arrays.asList("error"));
 
         PasswordChangeAttemptResponse passwordChangeAttemptResponse = passwordResetServiceBean.attemptPasswordReset(new BuiltinUser(), "newpass", "token");
 
diff --git a/src/test/java/edu/harvard/iq/dataverse/passwordreset/PasswordValidatorTest.java b/src/test/java/edu/harvard/iq/dataverse/passwordreset/PasswordValidatorTest.java
index f6d02e35ddf..c15f7fa95e9 100644
--- a/src/test/java/edu/harvard/iq/dataverse/passwordreset/PasswordValidatorTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/passwordreset/PasswordValidatorTest.java
@@ -1,72 +1,50 @@
 package edu.harvard.iq.dataverse.passwordreset;
 
-import static org.junit.Assert.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertEquals;
 
-import java.util.Arrays;
-import java.util.Collection;
+import java.util.stream.Stream;
 
-import org.junit.Test;
-import org.junit.runner.RunWith;
-import org.junit.runners.Parameterized;
-import org.junit.runners.Parameterized.Parameters;
+import org.junit.jupiter.params.ParameterizedTest;
+import org.junit.jupiter.params.provider.Arguments;
+import org.junit.jupiter.params.provider.MethodSource;
 
-@RunWith(Parameterized.class)
 public class PasswordValidatorTest {
-
-    public String password;
-    public boolean expected;
-    public boolean mustContainSpecialCharacters;
-    public boolean mustContainCapitalLetters;
-    public boolean mustContainNumbers;
-    public int minLength;
-    public int maxLength;
-
-    public PasswordValidatorTest(String password, boolean expected, boolean mustContainSpecialCharacters,
-            boolean mustContainCapitalLetters, boolean mustContainNumbers, int minLength, int maxLength) {
-        this.password = password;
-        this.expected = expected;
-        this.mustContainSpecialCharacters = mustContainSpecialCharacters;
-        this.mustContainCapitalLetters = mustContainCapitalLetters;
-        this.mustContainNumbers = mustContainNumbers;
-        this.minLength = minLength;
-        this.maxLength = maxLength;
-    }
-
-    @Parameters
-    public static Collection<Object[]> parameters() {
-        return Arrays.asList(
-             new Object[][] {
-                // Check if PasswordValidator correctly validates correct passwords
-                // with all combinations of Special Characters,
-                // Capital Letters and Numbers
-                {"abcdefghabcdefgh", true, false, false, false, 8, 30},    
-                {"@bcdefgh@bcdefgh", true, true, false, false, 8, 30},      
-                {"@bAdefgh@bAdefgh", true, true, true, false, 8, 30},      
-                {"abAdefghabAdefgh", true, false, true, false, 8, 30},     
-                {"a1Adefgha1Adefgh", true, false, true, true, 8, 30},      
-                {"ab1defghab1defgh", true, false, false, true, 8, 30},     
-                {"@1cdefgh@1cdefgh", true, true, false, true, 8, 30},      
-                {"@1Adefgh@1Adefgh", true, true, true, true, 8, 30},      
-                // Check if PasswordValidator correctly rejects wrong passwords
-                // with all combinations of Special Characters,
-                // Capital Letters and Numbers
-                {"abcabc", false, false, false, false, 8, 30},
-                {"abcdabcd", false, true, false, false, 8, 30},       
-                {"@bcd@bcd", false, true, true, false, 8, 30},       
-                {"@bc1@bc1", false, false, true, false, 8, 30},      
-                {"a1cda1cd", false, false, true, true, 8, 30},       
-                {"AbcdAbcd", false, false, false, true, 8, 30},      
-                {"@Bcd@Bcd", false, true, false, true, 8, 30},       
-                {"a1Ada1Ad", false, true, true, true, 8, 30},
-                {"", false, false, false, false, 1, 30},
-                {" ", false, false, false, false, 1, 30},
-                {"?!abcdef", false, true, false, false, 8, 30}
-             }
+    
+    static Stream<Arguments> testCases() {
+        return Stream.of(
+            // Check if PasswordValidator correctly validates correct passwords
+            // with all combinations of Special Characters,
+            // Capital Letters and Numbers
+            Arguments.of("abcdefghabcdefgh", true, false, false, false, 8, 30),
+            Arguments.of("@bcdefgh@bcdefgh", true, true, false, false, 8, 30),
+            Arguments.of("@bAdefgh@bAdefgh", true, true, true, false, 8, 30),
+            Arguments.of("abAdefghabAdefgh", true, false, true, false, 8, 30),
+            Arguments.of("a1Adefgha1Adefgh", true, false, true, true, 8, 30),
+            Arguments.of("ab1defghab1defgh", true, false, false, true, 8, 30),
+            Arguments.of("@1cdefgh@1cdefgh", true, true, false, true, 8, 30),
+            Arguments.of("@1Adefgh@1Adefgh", true, true, true, true, 8, 30),
+            // Check if PasswordValidator correctly rejects wrong passwords
+            // with all combinations of Special Characters,
+            // Capital Letters and Numbers
+            Arguments.of("abcabc", false, false, false, false, 8, 30),
+            Arguments.of("abcdabcd", false, true, false, false, 8, 30),
+            Arguments.of("@bcd@bcd", false, true, true, false, 8, 30),
+            Arguments.of("@bc1@bc1", false, false, true, false, 8, 30),
+            Arguments.of("a1cda1cd", false, false, true, true, 8, 30),
+            Arguments.of("AbcdAbcd", false, false, false, true, 8, 30),
+            Arguments.of("@Bcd@Bcd", false, true, false, true, 8, 30),
+            Arguments.of("a1Ada1Ad", false, true, true, true, 8, 30),
+            Arguments.of("", false, false, false, false, 1, 30),
+            Arguments.of(" ", false, false, false, false, 1, 30),
+            Arguments.of("?!abcdef", false, true, false, false, 8, 30)
         );
     }
     
-    @Test
-    public void testValidatePassword() {
+    @ParameterizedTest
+    @MethodSource("testCases")
+    void testValidatePassword(String password, boolean expected, boolean mustContainSpecialCharacters,
+                              boolean mustContainCapitalLetters, boolean mustContainNumbers, int minLength,
+                              int maxLength) {
         PasswordValidator validator = PasswordValidator.buildValidator(mustContainSpecialCharacters,
                 mustContainCapitalLetters, mustContainNumbers, minLength, maxLength);
         boolean isValidPassword = validator.validatePassword(password);
diff --git a/src/test/java/edu/harvard/iq/dataverse/pidproviders/PidUtilTest.java b/src/test/java/edu/harvard/iq/dataverse/pidproviders/PidUtilTest.java
index cbab8745a9b..dabc7f68fce 100644
--- a/src/test/java/edu/harvard/iq/dataverse/pidproviders/PidUtilTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/pidproviders/PidUtilTest.java
@@ -1,7 +1,6 @@
 package edu.harvard.iq.dataverse.pidproviders;
 
 import edu.harvard.iq.dataverse.DOIServiceBean;
-import edu.harvard.iq.dataverse.DataFileCategoryServiceBean;
 import edu.harvard.iq.dataverse.GlobalId;
 import edu.harvard.iq.dataverse.GlobalIdServiceBean;
 import edu.harvard.iq.dataverse.settings.SettingsServiceBean;
@@ -10,23 +9,25 @@
 import java.util.ArrayList;
 import java.util.List;
 
-import javax.json.JsonObjectBuilder;
-import javax.ws.rs.NotFoundException;
-import org.junit.Test;
-import org.junit.runner.RunWith;
+import jakarta.json.JsonObjectBuilder;
+import jakarta.ws.rs.NotFoundException;
+import org.junit.jupiter.api.Disabled;
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.extension.ExtendWith;
+
 import org.mockito.InjectMocks;
 import org.mockito.Mock;
 import org.mockito.Mockito;
 import org.mockito.MockitoAnnotations;
-import org.mockito.junit.MockitoJUnitRunner;
-import org.junit.Before;
-import org.junit.Ignore;
+import org.mockito.junit.jupiter.MockitoExtension;
+
 import static org.junit.jupiter.api.Assertions.*;
 
 /**
  * Useful for testing but requires DataCite credentials, etc.
  */
-@RunWith(MockitoJUnitRunner.class)
+@ExtendWith(MockitoExtension.class)
 public class PidUtilTest {
     @Mock
     private SettingsServiceBean settingsServiceBean;
@@ -34,14 +35,15 @@ public class PidUtilTest {
     private PermaLinkPidProviderServiceBean p = new PermaLinkPidProviderServiceBean();
     
 
-    @Before public void initMocks() {
+    @BeforeEach
+    public void initMocks() {
         MockitoAnnotations.initMocks(this);
         Mockito.when(settingsServiceBean.getValueForKey(SettingsServiceBean.Key.Protocol)).thenReturn("perma");
         Mockito.when(settingsServiceBean.getValueForKey(SettingsServiceBean.Key.Authority)).thenReturn("DANSLINK");
         p.reInit();
     }
     
-    @Ignore
+    @Disabled
     @Test
     public void testGetDoi() throws IOException {
         String username = System.getenv("DataCiteUsername");
diff --git a/src/test/java/edu/harvard/iq/dataverse/privateurl/PrivateUrlUtilTest.java b/src/test/java/edu/harvard/iq/dataverse/privateurl/PrivateUrlUtilTest.java
index 3984f972308..8c9e0261bfa 100644
--- a/src/test/java/edu/harvard/iq/dataverse/privateurl/PrivateUrlUtilTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/privateurl/PrivateUrlUtilTest.java
@@ -14,7 +14,7 @@
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.List;
-import org.junit.Assert;
+
 import org.junit.jupiter.api.Test;
 import org.junit.jupiter.api.BeforeAll;
 import org.junit.jupiter.params.ParameterizedTest;
diff --git a/src/test/java/edu/harvard/iq/dataverse/provenance/ProvInvestigatorTest.java b/src/test/java/edu/harvard/iq/dataverse/provenance/ProvInvestigatorTest.java
index efa83fbb950..f59f686a94c 100644
--- a/src/test/java/edu/harvard/iq/dataverse/provenance/ProvInvestigatorTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/provenance/ProvInvestigatorTest.java
@@ -6,14 +6,15 @@
 package edu.harvard.iq.dataverse.provenance;
 
 import com.google.gson.JsonParser;
-import edu.harvard.iq.dataverse.NonEssentialTests;
 import java.io.IOException;
 import java.util.HashMap;
 import java.util.logging.Logger;
-import static org.junit.Assert.*;
-import org.junit.Before;
-import org.junit.Test;
-import org.junit.experimental.categories.Category;
+import static org.junit.jupiter.api.Assertions.*;
+
+import edu.harvard.iq.dataverse.util.testing.Tags;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Tag;
+import org.junit.jupiter.api.Test;
 
 /**
  *
@@ -27,13 +28,13 @@ public class ProvInvestigatorTest {
     JsonParser jsonParser;
     private static final Logger logger = Logger.getLogger(ProvInvestigatorTest.class.getCanonicalName());
     
-    @Before
+    @BeforeEach
     public void setUp() {
         provUtilBean = ProvInvestigator.getInstance();
         jsonParser = new JsonParser();
     }
     
-    @Category(NonEssentialTests.class)
+    @Tag(Tags.NOT_ESSENTIAL_UNITTESTS)
     @Test
     public void testProvValidator() {   
         String validJsonString = "{\n" +
@@ -105,7 +106,7 @@ public void testProvValidator() {
 
     }
     
-    @Category(NonEssentialTests.class)
+    @Tag(Tags.NOT_ESSENTIAL_UNITTESTS)
     @Test
     public void testProvNamesNotInsideEntity() throws IOException {
         //name and type on their own
@@ -120,7 +121,7 @@ public void testProvNamesNotInsideEntity() throws IOException {
         assertFalse(entities.size() > 0); 
     }
     
-    @Category(NonEssentialTests.class)
+    @Tag(Tags.NOT_ESSENTIAL_UNITTESTS)
     @Test
     public void testProvNameJsonParserEmptyEntities() throws IOException {
         String jsonString = "{\n" +
@@ -159,8 +160,8 @@ public void testProvNameJsonParserEmptyEntities() throws IOException {
     
     //Note: this test has entity tags in multiple places, all with unique names
     //Only one entity is added to our list per unique name.
-
-    @Category(NonEssentialTests.class)
+    
+    @Tag(Tags.NOT_ESSENTIAL_UNITTESTS)
     @Test
     public void testProvJsonWithEntitiesInMultiplePlaces() throws IOException {
         String jsonString = "{\n" +
@@ -233,7 +234,7 @@ public void testProvJsonWithEntitiesInMultiplePlaces() throws IOException {
         assertTrue(entities.size() == 7);
     }
     
-    @Category(NonEssentialTests.class)
+    @Tag(Tags.NOT_ESSENTIAL_UNITTESTS)
     @Test
     public void testProvJsonWithEntitiesInMultiplePlacesWithSameNames() throws IOException {
         String jsonString = "{\n" +
@@ -271,8 +272,8 @@ public void testProvJsonWithEntitiesInMultiplePlacesWithSameNames() throws IOExc
         assertTrue(entities.get("ex:report2").fileType.equals("not report"));
         assertTrue(entities.size() == 3); //ex:report2 & ex:report1 are repeated
     }
-   
-    @Category(NonEssentialTests.class)
+    
+    @Tag(Tags.NOT_ESSENTIAL_UNITTESTS)
     @Test
     public void testProvLongJsonWithEntities() throws IOException {
         String jsonString = "{\n" +
diff --git a/src/test/java/edu/harvard/iq/dataverse/repositorystorageabstractionlayer/RepositoryStorageAbstractionLayerUtilTest.java b/src/test/java/edu/harvard/iq/dataverse/repositorystorageabstractionlayer/RepositoryStorageAbstractionLayerUtilTest.java
index d09c6eee8dc..99fd3b2766f 100644
--- a/src/test/java/edu/harvard/iq/dataverse/repositorystorageabstractionlayer/RepositoryStorageAbstractionLayerUtilTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/repositorystorageabstractionlayer/RepositoryStorageAbstractionLayerUtilTest.java
@@ -4,10 +4,10 @@
 import edu.harvard.iq.dataverse.locality.StorageSite;
 import java.util.ArrayList;
 import java.util.List;
-import javax.json.JsonArray;
-import javax.json.JsonObject;
-import static org.junit.Assert.assertEquals;
-import org.junit.Test;
+import jakarta.json.JsonArray;
+import jakarta.json.JsonObject;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import org.junit.jupiter.api.Test;
 
 public class RepositoryStorageAbstractionLayerUtilTest {
 
diff --git a/src/test/java/edu/harvard/iq/dataverse/search/IndexUtilTest.java b/src/test/java/edu/harvard/iq/dataverse/search/IndexUtilTest.java
index 6599f829673..9be53b8b8b1 100644
--- a/src/test/java/edu/harvard/iq/dataverse/search/IndexUtilTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/search/IndexUtilTest.java
@@ -1,32 +1,32 @@
 package edu.harvard.iq.dataverse.search;
 
 import java.util.Arrays;
-import java.util.List;
-import org.junit.After;
-import org.junit.AfterClass;
-import org.junit.Before;
-import org.junit.BeforeClass;
-import org.junit.Test;
-import static org.junit.Assert.*;
+
+import org.junit.jupiter.api.AfterAll;
+import org.junit.jupiter.api.AfterEach;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
+import static org.junit.jupiter.api.Assertions.*;
 
 public class IndexUtilTest {
 
     public IndexUtilTest() {
     }
 
-    @BeforeClass
+    @BeforeAll
     public static void setUpClass() {
     }
 
-    @AfterClass
+    @AfterAll
     public static void tearDownClass() {
     }
 
-    @Before
+    @BeforeEach
     public void setUp() {
     }
 
-    @After
+    @AfterEach
     public void tearDown() {
     }
 
diff --git a/src/test/java/edu/harvard/iq/dataverse/search/SearchFilesServiceBeanTest.java b/src/test/java/edu/harvard/iq/dataverse/search/SearchFilesServiceBeanTest.java
index 0ea5597b905..c9f9eb67130 100644
--- a/src/test/java/edu/harvard/iq/dataverse/search/SearchFilesServiceBeanTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/search/SearchFilesServiceBeanTest.java
@@ -1,7 +1,7 @@
 package edu.harvard.iq.dataverse.search;
 
-import static org.junit.Assert.assertEquals;
-import org.junit.Test;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import org.junit.jupiter.api.Test;
 
 public class SearchFilesServiceBeanTest {
 
diff --git a/src/test/java/edu/harvard/iq/dataverse/search/SearchIncludeFragmentTest.java b/src/test/java/edu/harvard/iq/dataverse/search/SearchIncludeFragmentTest.java
index f94da336ca3..234d72c0d19 100644
--- a/src/test/java/edu/harvard/iq/dataverse/search/SearchIncludeFragmentTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/search/SearchIncludeFragmentTest.java
@@ -5,7 +5,7 @@
 import edu.harvard.iq.dataverse.MetadataBlock;
 import org.hamcrest.MatcherAssert;
 import org.hamcrest.Matchers;
-import org.junit.Test;
+import org.junit.jupiter.api.Test;
 import org.mockito.Mockito;
 
 import java.util.Arrays;
diff --git a/src/test/java/edu/harvard/iq/dataverse/search/SolrSearchResultTest.java b/src/test/java/edu/harvard/iq/dataverse/search/SolrSearchResultTest.java
index 8def87cee5a..4fb29869db7 100644
--- a/src/test/java/edu/harvard/iq/dataverse/search/SolrSearchResultTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/search/SolrSearchResultTest.java
@@ -1,18 +1,18 @@
 package edu.harvard.iq.dataverse.search;
 
-import javax.json.Json;
-import javax.json.JsonObject;
-import javax.json.JsonObjectBuilder;
+import jakarta.json.Json;
+import jakarta.json.JsonObject;
+import jakarta.json.JsonObjectBuilder;
 
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertTrue;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertTrue;
 
 import java.util.ArrayList;
 import java.util.List;
 
-import org.junit.After;
-import org.junit.Before;
-import org.junit.Test;
+import org.junit.jupiter.api.AfterEach;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
 
 // ****************************************************************************************
 // The following tests test the setPublicationStatuses method aiming for 100% prime 
@@ -35,7 +35,7 @@ public class SolrSearchResultTest {
 
     SolrSearchResult solrSearchResult;
 
-    @Before
+    @BeforeEach
     public void before() {
         this.unpublishedFlag = IndexServiceBean.getUNPUBLISHED_STRING();
         this.publishedFlag = IndexServiceBean.getPUBLISHED_STRING();
@@ -47,7 +47,7 @@ public void before() {
         this.solrSearchResult = new SolrSearchResult("myQuery", "myName");
     }
 
-    @After
+    @AfterEach
     public void after() {
         this.unpublishedFlag = null;
         this.publishedFlag = null;
diff --git a/src/test/java/edu/harvard/iq/dataverse/search/SortByTest.java b/src/test/java/edu/harvard/iq/dataverse/search/SortByTest.java
index 956063d4e1f..c8425198b83 100644
--- a/src/test/java/edu/harvard/iq/dataverse/search/SortByTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/search/SortByTest.java
@@ -1,12 +1,12 @@
 package edu.harvard.iq.dataverse.search;
 
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertFalse;
-import static org.junit.Assert.assertTrue;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertFalse;
+import static org.junit.jupiter.api.Assertions.assertTrue;
 
-import org.junit.After;
-import org.junit.Before;
-import org.junit.Test;
+import org.junit.jupiter.api.AfterEach;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
 
 public class SortByTest {
 
@@ -14,14 +14,14 @@ public class SortByTest {
     private String order;
     private SortBy instance;
 
-    @Before
+    @BeforeEach
     public void setUp() {
         this.field = "field";
         this.order = SortBy.ASCENDING;
         this.instance = new SortBy(field, order);
     }
 
-    @After
+    @AfterEach
     public void tearDown() {
         this.field = null;
         this.order = null;
diff --git a/src/test/java/edu/harvard/iq/dataverse/sitemap/SiteMapUtilTest.java b/src/test/java/edu/harvard/iq/dataverse/sitemap/SiteMapUtilTest.java
index 4f2b00bbea4..41b13417a23 100644
--- a/src/test/java/edu/harvard/iq/dataverse/sitemap/SiteMapUtilTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/sitemap/SiteMapUtilTest.java
@@ -21,10 +21,10 @@
 import java.util.ArrayList;
 import java.util.Date;
 import java.util.List;
-import static org.junit.Assert.assertFalse;
-import static org.junit.Assert.assertNull;
-import static org.junit.Assert.assertTrue;
-import org.junit.Test;
+import static org.junit.jupiter.api.Assertions.assertFalse;
+import static org.junit.jupiter.api.Assertions.assertNull;
+import static org.junit.jupiter.api.Assertions.assertTrue;
+import org.junit.jupiter.api.Test;
 import org.xml.sax.SAXException;
 
 public class SiteMapUtilTest {
diff --git a/src/test/java/edu/harvard/iq/dataverse/userdata/UserListMakerTest.java b/src/test/java/edu/harvard/iq/dataverse/userdata/UserListMakerTest.java
index b744db5be20..10c07cf0ef5 100644
--- a/src/test/java/edu/harvard/iq/dataverse/userdata/UserListMakerTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/userdata/UserListMakerTest.java
@@ -1,6 +1,6 @@
 package edu.harvard.iq.dataverse.userdata;
 
-import static org.junit.Assert.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertEquals;
 import static org.mockito.Mockito.mock;
 
 import org.junit.jupiter.api.AfterEach;
diff --git a/src/test/java/edu/harvard/iq/dataverse/util/BitSetTest.java b/src/test/java/edu/harvard/iq/dataverse/util/BitSetTest.java
index 5aa37e8b05c..475d986b63c 100644
--- a/src/test/java/edu/harvard/iq/dataverse/util/BitSetTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/util/BitSetTest.java
@@ -7,12 +7,13 @@
 import java.util.Arrays;
 import java.util.EnumSet;
 import java.util.List;
-import org.junit.After;
-import org.junit.AfterClass;
-import org.junit.Before;
-import org.junit.BeforeClass;
-import org.junit.Test;
-import static org.junit.Assert.*;
+import org.junit.jupiter.api.AfterEach;
+import org.junit.jupiter.api.AfterAll;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.Test;
+
+import static org.junit.jupiter.api.Assertions.*;
 
 /**
  *
@@ -27,21 +28,21 @@ enum TestEnum {
 	public BitSetTest() {
 	}
 	
-	@BeforeClass
+	@BeforeAll
 	public static void setUpClass() {
 	}
 	
-	@AfterClass
+	@AfterAll
 	public static void tearDownClass() {
 	}
 	
 	BitSet sut;
-	@Before
+	@BeforeEach
 	public void setUp() {
 		sut = new BitSet();
 	}
 	
-	@After
+	@AfterEach
 	public void tearDown() {
 	}
 
@@ -116,9 +117,9 @@ public void testIntersect() {
 		sut = sut1.copy().intersect(sut2);
 		for ( short i : BitSet.allIndices() ) {
 			if ( sut.isSet(i) ) {
-				assertTrue( "expected true at idx " + i, sut1.isSet(i) && sut2.isSet(i) );
+				assertTrue(sut1.isSet(i) && sut2.isSet(i), "expected true at idx " + i);
 			} else {
-				assertFalse( "expected false at idx " + i, sut1.isSet(i) && sut2.isSet(i) );
+				assertFalse(sut1.isSet(i) && sut2.isSet(i), "expected false at idx " + i);
 			}
 		}
 	}
@@ -133,9 +134,9 @@ public void testXor() {
 		sut = sut1.copy().xor(sut2);
 		for ( short i : BitSet.allIndices() ) {
 			if ( sut.isSet(i) ) {
-				assertTrue( "expected true at idx " + i, sut1.isSet(i) ^ sut2.isSet(i) );
+				assertTrue(sut1.isSet(i) ^ sut2.isSet(i), "expected true at idx " + i);
 			} else {
-				assertFalse( "expected false at idx " + i, sut1.isSet(i) ^ sut2.isSet(i) );
+				assertFalse(sut1.isSet(i) ^ sut2.isSet(i), "expected false at idx " + i);
 			}
 		}
 	}
diff --git a/src/test/java/edu/harvard/iq/dataverse/util/BundleUtilTest.java b/src/test/java/edu/harvard/iq/dataverse/util/BundleUtilTest.java
index 5be06a5e07b..8f788154a93 100644
--- a/src/test/java/edu/harvard/iq/dataverse/util/BundleUtilTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/util/BundleUtilTest.java
@@ -2,11 +2,13 @@
 
 import java.util.Arrays;
 import java.util.List;
-import java.util.Locale;
 import java.util.MissingResourceException;
-import java.util.ResourceBundle;
-import org.junit.Test;
-import static org.junit.Assert.assertEquals;
+
+import org.junit.jupiter.api.Test;
+
+import static org.junit.jupiter.api.Assertions.assertDoesNotThrow;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertThrows;
 
 public class BundleUtilTest {
 
@@ -79,14 +81,14 @@ public void testStringFromPropertyFile() {
     }
 
     //To assure that the MissingResourceException bubble up from this call
-    @Test(expected = MissingResourceException.class)
-    public void testStringFromPropertyFileException() {
-        BundleUtil.getStringFromPropertyFile("FAKE","MimeTypeFacets");
+    @Test
+    void testStringFromPropertyFileException() {
+        assertThrows(MissingResourceException.class, () -> BundleUtil.getStringFromPropertyFile("FAKE","MimeTypeFacets"));
     }
     
     //To assure MissingResourceException is caught when calling normal bundle calls
     @Test
-    public void testNoErrorNonExistentStringBundle() {
-        BundleUtil.getStringFromBundle("FAKE", null, BundleUtil.getResourceBundle("MimeTypeFacets")); 
+    void testNoErrorNonExistentStringBundle() {
+        assertDoesNotThrow(() -> BundleUtil.getStringFromBundle("FAKE", null, BundleUtil.getResourceBundle("MimeTypeFacets")));
     }
 }
diff --git a/src/test/java/edu/harvard/iq/dataverse/util/FileSortFieldAndOrderTest.java b/src/test/java/edu/harvard/iq/dataverse/util/FileSortFieldAndOrderTest.java
index 0ad76832952..84a200542fe 100644
--- a/src/test/java/edu/harvard/iq/dataverse/util/FileSortFieldAndOrderTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/util/FileSortFieldAndOrderTest.java
@@ -1,8 +1,8 @@
 package edu.harvard.iq.dataverse.util;
 
 import edu.harvard.iq.dataverse.search.SortBy;
-import org.junit.Test;
-import static org.junit.Assert.assertEquals;
+import org.junit.jupiter.api.Test;
+import static org.junit.jupiter.api.Assertions.assertEquals;
 
 public class FileSortFieldAndOrderTest {
 
diff --git a/src/test/java/edu/harvard/iq/dataverse/util/FileUtilTest.java b/src/test/java/edu/harvard/iq/dataverse/util/FileUtilTest.java
index 1d481f18cf5..2cfe9f25d7e 100644
--- a/src/test/java/edu/harvard/iq/dataverse/util/FileUtilTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/util/FileUtilTest.java
@@ -8,10 +8,6 @@
 import edu.harvard.iq.dataverse.FileMetadata;
 import edu.harvard.iq.dataverse.Guestbook;
 import edu.harvard.iq.dataverse.TermsOfUseAndAccess;
-import com.fasterxml.jackson.databind.JsonNode;
-import com.fasterxml.jackson.databind.ObjectMapper;
-import edu.harvard.iq.dataverse.*;
-import edu.harvard.iq.dataverse.api.UtilIT;
 import edu.harvard.iq.dataverse.license.License;
 import edu.harvard.iq.dataverse.util.FileUtil.FileCitationExtension;
 
@@ -19,110 +15,76 @@
 import java.io.IOException;
 import java.time.LocalDate;
 import java.net.URI;
-import java.net.URL;
-import java.nio.file.Files;
-import java.nio.file.Paths;
-import java.util.Arrays;
-import java.util.Collection;
 import java.util.logging.Level;
 import java.util.logging.Logger;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertFalse;
-import static org.junit.Assert.fail;
-
-import org.junit.Ignore;
-import org.junit.Test;
-import org.junit.experimental.runners.Enclosed;
-import org.junit.runner.RunWith;
-import org.junit.runners.Parameterized;
-import org.junit.runners.Parameterized.Parameter;
-import org.junit.runners.Parameterized.Parameters;
-
-@RunWith(Enclosed.class)
-public class FileUtilTest {
-
-    @RunWith(Parameterized.class)
-    public static class FileUtilParamTest {
-
-        @Parameters
-        public static Collection data() {
-            return Arrays.asList(new Object[][] {
-                { null, null, null },
-
-                { "trees.png-endnote.xml", "trees.png", FileUtil.FileCitationExtension.ENDNOTE },
-                { "trees.png.ris", "trees.png", FileUtil.FileCitationExtension.RIS },
-                { "trees.png.bib", "trees.png", FileUtil.FileCitationExtension.BIBTEX },
-                { null, "trees.png", null },
+import java.util.stream.Stream;
 
-                { "50by1000-endnote.xml", "50by1000.tab", FileUtil.FileCitationExtension.ENDNOTE },
-                { "50by1000.ris", "50by1000.tab", FileUtil.FileCitationExtension.RIS },
-                { "50by1000.bib", "50by1000.tab", FileUtil.FileCitationExtension.BIBTEX }
-            });
-        }
-
-        @Parameter
-        public String expectedFileName;
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.params.ParameterizedTest;
+import org.junit.jupiter.params.provider.Arguments;
+import org.junit.jupiter.params.provider.MethodSource;
 
-        @Parameter(1)
-        public String actualFileName;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertFalse;
+import static org.junit.jupiter.api.Assertions.assertNull;
+import static org.junit.jupiter.api.Assertions.assertTrue;
+import static org.junit.jupiter.api.Assertions.fail;
 
-        @Parameter(2)
-        public FileCitationExtension citationExtension;
-
-        @Test
-        public void testGetCiteDataFileFilename() {
-            assertEquals(expectedFileName, FileUtil.getCiteDataFileFilename(actualFileName, citationExtension));
-        }
+public class FileUtilTest {
+    
+    static Stream<Arguments> dataFilenames() {
+        return Stream.of(
+            Arguments.of(null, null, null),
+            Arguments.of("trees.png-endnote.xml", "trees.png", FileUtil.FileCitationExtension.ENDNOTE),
+            Arguments.of("trees.png.ris", "trees.png", FileUtil.FileCitationExtension.RIS),
+            Arguments.of("trees.png.bib", "trees.png", FileUtil.FileCitationExtension.BIBTEX),
+            Arguments.of(null, "trees.png", null),
+            Arguments.of("50by1000-endnote.xml", "50by1000.tab", FileUtil.FileCitationExtension.ENDNOTE),
+            Arguments.of("50by1000.ris", "50by1000.tab", FileUtil.FileCitationExtension.RIS),
+            Arguments.of("50by1000.bib", "50by1000.tab", FileUtil.FileCitationExtension.BIBTEX)
+        );
     }
-
-    @RunWith(Parameterized.class)
-    public static class FileUtilParamTest2 {
-
-        @Parameter
-        public String expectedString;
-
-        @Parameter(1)
-        public String originalName;
-
-        @Parameter(2)
-        public String newExtension;
-
-        @Parameters
-        public static Collection data() {
-            return Arrays.asList(new Object[][] {
-                // functional approach: what should the method do
-                // replace no extension with an empty extension
-                { "no-extension.", "no-extension", ""},
-
-                // replace extension x with same extension
-                { "extension.x", "extension.x", "x" },
-
-                // replace extension x with another extension y
-                { "extension.y", "extension.x", "y" },
-
-                // interface approach: what are possible inputs
-                // will not pass as null is not handled
-                //{ null, null, null },
-                //{ null, null, "" },
-                //{ null, null, "y" },
-
-                { ".null", "", null },
-                { ".", "", "" },
-                { ".y", "", "y" },
-            });
-        }
-
-        @Test
-        public void testReplaceExtension() {
-            assertEquals(expectedString, FileUtil.replaceExtension(originalName, newExtension));
-        }
-
+    
+    @ParameterizedTest
+    @MethodSource("dataFilenames")
+    void testGetCiteDataFileFilename(String expectedFileName, String actualFileName, FileCitationExtension citationExtension) {
+        assertEquals(expectedFileName, FileUtil.getCiteDataFileFilename(actualFileName, citationExtension));
+    }
+    
+    static Stream<Arguments> dataReplaceNames() {
+        return Stream.of(
+            // functional approach: what should the method do
+            // replace no extension with an empty extension
+            Arguments.of("no-extension.", "no-extension", ""),
+        
+            // replace extension x with same extension
+            Arguments.of("extension.x", "extension.x", "x"),
+        
+            // replace extension x with another extension y
+            Arguments.of("extension.y", "extension.x", "y"),
+        
+            // interface approach: what are possible inputs
+            // will not pass as null is not handled
+            //Arguments.of(null, null, null),
+            //Arguments.of(null, null, ""),
+            //Arguments.of(null, null, "y"),
+            
+            Arguments.of(".null", "", null),
+            Arguments.of(".", "", ""),
+            Arguments.of(".y", "", "y")
+        );
+    }
+    
+    @ParameterizedTest
+    @MethodSource("dataReplaceNames")
+    void testReplaceExtension(String expectedString, String originalName, String newExtension) {
+        assertEquals(expectedString, FileUtil.replaceExtension(originalName, newExtension));
     }
 
-    public static class FileUtilNoParamTest {
+    static class FileUtilNoParamTest {
         @Test
         public void testIsDownloadPopupRequiredNull() {
-            assertEquals(false, FileUtil.isDownloadPopupRequired(null));
+            assertFalse(FileUtil.isDownloadPopupRequired(null));
         }
 
         @Test
@@ -130,7 +92,7 @@ public void testIsDownloadPopupRequiredDraft() {
             Dataset dataset = new Dataset();
             DatasetVersion dsv1 = dataset.getOrCreateEditVersion();
             assertEquals(DatasetVersion.VersionState.DRAFT, dsv1.getVersionState());
-            assertEquals(false, FileUtil.isDownloadPopupRequired(dsv1));
+            assertFalse(FileUtil.isDownloadPopupRequired(dsv1));
         }
 
         @Test
@@ -142,7 +104,7 @@ public void testIsDownloadPopupRequiredLicenseCC0() {
             license.setDefault(true);
             termsOfUseAndAccess.setLicense(license);
             dsv1.setTermsOfUseAndAccess(termsOfUseAndAccess);
-            assertEquals(false, FileUtil.isDownloadPopupRequired(dsv1));
+            assertFalse(FileUtil.isDownloadPopupRequired(dsv1));
         }
 
         @Test
@@ -160,7 +122,7 @@ public void testIsDownloadPopupRequiredHasTermsOfUseAndCc0License() {
             termsOfUseAndAccess.setLicense(license);
             termsOfUseAndAccess.setTermsOfUse("be excellent to each other");
             dsv1.setTermsOfUseAndAccess(termsOfUseAndAccess);
-            assertEquals(false, FileUtil.isDownloadPopupRequired(dsv1));
+            assertFalse(FileUtil.isDownloadPopupRequired(dsv1));
         }
 
         @Test
@@ -171,7 +133,7 @@ public void testIsDownloadPopupRequiredHasTermsOfUseAndNoneLicense() {
             termsOfUseAndAccess.setLicense(null);
             termsOfUseAndAccess.setTermsOfUse("be excellent to each other");
             dsv1.setTermsOfUseAndAccess(termsOfUseAndAccess);
-            assertEquals(true, FileUtil.isDownloadPopupRequired(dsv1));
+            assertTrue(FileUtil.isDownloadPopupRequired(dsv1));
         }
 
         @Test
@@ -181,7 +143,7 @@ public void testIsDownloadPopupRequiredHasTermsOfAccess() {
             TermsOfUseAndAccess termsOfUseAndAccess = new TermsOfUseAndAccess();
             termsOfUseAndAccess.setTermsOfAccess("Terms of *Access* is different than Terms of Use");
             dsv1.setTermsOfUseAndAccess(termsOfUseAndAccess);
-            assertEquals(true, FileUtil.isDownloadPopupRequired(dsv1));
+            assertTrue(FileUtil.isDownloadPopupRequired(dsv1));
         }
 
         @Test
@@ -195,17 +157,17 @@ public void testIsDownloadPopupRequiredHasGuestBook() {
             dataset.setGuestbook(guestbook);
             Dataverse dataverse = new Dataverse();
             guestbook.setDataverse(dataverse);
-            assertEquals(true, FileUtil.isDownloadPopupRequired(datasetVersion));
+            assertTrue(FileUtil.isDownloadPopupRequired(datasetVersion));
         }
 
         @Test
         public void testIsPubliclyDownloadable() {
-            assertEquals(false, FileUtil.isPubliclyDownloadable(null));
+            assertFalse(FileUtil.isPubliclyDownloadable(null));
 
             FileMetadata restrictedFileMetadata = new FileMetadata();
             restrictedFileMetadata.setRestricted(true);
             restrictedFileMetadata.setDataFile(new DataFile());
-            assertEquals(false, FileUtil.isPubliclyDownloadable(restrictedFileMetadata));
+            assertFalse(FileUtil.isPubliclyDownloadable(restrictedFileMetadata));
 
             FileMetadata nonRestrictedFileMetadata = new FileMetadata();
             nonRestrictedFileMetadata.setDataFile(new DataFile());
@@ -215,7 +177,7 @@ public void testIsPubliclyDownloadable() {
             Dataset dataset = new Dataset();
             dsv.setDataset(dataset);
             nonRestrictedFileMetadata.setRestricted(false);
-            assertEquals(true, FileUtil.isPubliclyDownloadable(nonRestrictedFileMetadata));
+            assertTrue(FileUtil.isPubliclyDownloadable(nonRestrictedFileMetadata));
         }
 
         @Test
@@ -232,7 +194,7 @@ public void testIsPubliclyDownloadable2() {
             Dataset dataset = new Dataset();
             dsv.setDataset(dataset);
             nonRestrictedFileMetadata.setRestricted(false);
-            assertEquals(false, FileUtil.isPubliclyDownloadable(nonRestrictedFileMetadata));
+            assertFalse(FileUtil.isPubliclyDownloadable(nonRestrictedFileMetadata));
         }
 
         @Test
@@ -250,7 +212,7 @@ public void testIsPubliclyDownloadable3() {
             Dataset dataset = new Dataset();
             dsv.setDataset(dataset);
             embargoedFileMetadata.setRestricted(false);
-            assertEquals(false, FileUtil.isPubliclyDownloadable(embargoedFileMetadata));
+            assertFalse(FileUtil.isPubliclyDownloadable(embargoedFileMetadata));
         }
 
         @Test
@@ -272,7 +234,7 @@ public void testgetFileDownloadUrl() {
 
         @Test
         public void testGetPublicDownloadUrl() {
-            assertEquals(null, FileUtil.getPublicDownloadUrl(null, null, null));
+            assertNull(FileUtil.getPublicDownloadUrl(null, null, null));
             assertEquals("https://demo.dataverse.org/api/access/datafile/:persistentId?persistentId=doi:10.5072/FK2/TLU3EP", FileUtil.getPublicDownloadUrl("https://demo.dataverse.org", "doi:10.5072/FK2/TLU3EP", 33L)); //pid before fileId
             assertEquals("https://demo.dataverse.org/api/access/datafile/:persistentId?persistentId=doi:10.5072/FK2/TLU3EP", FileUtil.getPublicDownloadUrl("https://demo.dataverse.org", "doi:10.5072/FK2/TLU3EP", null));
             assertEquals("https://demo.dataverse.org/api/access/datafile/33", FileUtil.getPublicDownloadUrl("https://demo.dataverse.org", null, 33L)); //pid before fileId
diff --git a/src/test/java/edu/harvard/iq/dataverse/util/FirstNameTest.java b/src/test/java/edu/harvard/iq/dataverse/util/FirstNameTest.java
index 972a06ef99e..1925094a2d8 100644
--- a/src/test/java/edu/harvard/iq/dataverse/util/FirstNameTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/util/FirstNameTest.java
@@ -1,10 +1,8 @@
 package edu.harvard.iq.dataverse.util;
 
-import org.junit.Test;
+import org.junit.jupiter.api.Test;
 
-import edu.harvard.iq.dataverse.util.FirstNames;
-
-import static org.junit.Assert.*;
+import static org.junit.jupiter.api.Assertions.*;
 
 /**
  *
diff --git a/src/test/java/edu/harvard/iq/dataverse/util/JhoveFileTypeTest.java b/src/test/java/edu/harvard/iq/dataverse/util/JhoveFileTypeTest.java
index 88a8d24c772..879c4bc1c4e 100644
--- a/src/test/java/edu/harvard/iq/dataverse/util/JhoveFileTypeTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/util/JhoveFileTypeTest.java
@@ -5,10 +5,10 @@
 import java.util.logging.Level;
 import java.util.logging.Logger;
 import org.apache.commons.io.FileUtils;
-import org.junit.AfterClass;
-import static org.junit.Assert.assertEquals;
-import org.junit.BeforeClass;
-import org.junit.Test;
+import org.junit.jupiter.api.AfterAll;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.Test;
 
 public class JhoveFileTypeTest {
 
@@ -24,7 +24,7 @@ public class JhoveFileTypeTest {
     static File ico;
     static File ipynb;
 
-    @BeforeClass
+    @BeforeAll
     public static void setUpClass() {
         System.setProperty("com.sun.aas.instanceRoot", baseDirForConfigFiles);
         jhoveFileType = new JhoveFileType();
@@ -41,7 +41,7 @@ public static void setUpClass() {
         ipynb = new File("src/test/java/edu/harvard/iq/dataverse/util/irc-metrics.ipynb");
     }
 
-    @AfterClass
+    @AfterAll
     public static void tearDownClass() {
         // SiteMapUtilTest relies on com.sun.aas.instanceRoot being null.
         System.clearProperty("com.sun.aas.instanceRoot");
diff --git a/src/test/java/edu/harvard/iq/dataverse/util/JsfHelperTest.java b/src/test/java/edu/harvard/iq/dataverse/util/JsfHelperTest.java
index 545d3b1a31f..316b9c3a2aa 100644
--- a/src/test/java/edu/harvard/iq/dataverse/util/JsfHelperTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/util/JsfHelperTest.java
@@ -4,72 +4,53 @@
 
 package edu.harvard.iq.dataverse.util;
 
-import static org.junit.Assert.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertEquals;
 
-import java.util.Arrays;
-import java.util.Collection;
+import java.util.stream.Stream;
 
-import org.junit.After;
-import org.junit.AfterClass;
-import org.junit.Before;
-import org.junit.BeforeClass;
-import org.junit.Test;
-import org.junit.runner.RunWith;
-import org.junit.runners.Parameterized;
-import org.junit.runners.Parameterized.Parameters;
+import org.junit.jupiter.api.AfterAll;
+import org.junit.jupiter.api.AfterEach;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.params.ParameterizedTest;
+import org.junit.jupiter.params.provider.Arguments;
+import org.junit.jupiter.params.provider.MethodSource;
 
-/**
- *
- * @author michael
- */
-@RunWith(Parameterized.class)
 public class JsfHelperTest {
 	
 	enum TestEnum { Lorem, Ipsum, Dolor, Sit, Amet }
 	
-	@BeforeClass
+	@BeforeAll
 	public static void setUpClass() {
 	}
 	
-	@AfterClass
+	@AfterAll
 	public static void tearDownClass() {
 	}
 	
-	@Before
+	@BeforeEach
 	public void setUp() {
 	}
 	
-	@After
+	@AfterEach
 	public void tearDown() {
 	}
-
-	public TestEnum inputEnum;
-	public String inputString;
-	public TestEnum defaultEnumValue;
 	
-	public JsfHelperTest(TestEnum inputEnum, String inputString, TestEnum defaultEnumValue) {
-		this.inputEnum = inputEnum;
-		this.inputString = inputString;
-		this.defaultEnumValue = defaultEnumValue;
-	}
-
-	@Parameters
-	public static Collection<Object[]> parameters() {
-		return Arrays.asList (
-			new Object[][] {
-				{ TestEnum.Lorem, "Lorem", TestEnum.Dolor },
-				{ TestEnum.Lorem, "Lorem   ", TestEnum.Dolor },
-				{ TestEnum.Dolor, null, TestEnum.Dolor },
-				{ TestEnum.Dolor, "THIS IS A BAD VALUE", TestEnum.Dolor },
-			}
+	static Stream<Arguments> parameters() {
+		return Stream.of(
+			Arguments.of(TestEnum.Lorem, "Lorem", TestEnum.Dolor),
+			Arguments.of(TestEnum.Lorem, "Lorem   ", TestEnum.Dolor),
+			Arguments.of(TestEnum.Dolor, null, TestEnum.Dolor),
+			Arguments.of(TestEnum.Dolor, "THIS IS A BAD VALUE", TestEnum.Dolor )
 		);
 	}
 
 	/**
 	 * Test of enumValue method, of class JsfHelper.
 	 */
-	@Test
-	public void testEnumValue() {
+	@ParameterizedTest
+	@MethodSource("parameters")
+	public void testEnumValue(TestEnum inputEnum, String inputString, TestEnum defaultEnumValue) {
 		System.out.println("enumValue");
 		JsfHelper instance = new JsfHelper();
 
diff --git a/src/test/java/edu/harvard/iq/dataverse/util/LruCacheTest.java b/src/test/java/edu/harvard/iq/dataverse/util/LruCacheTest.java
index b7ecff4ed8b..697e4853c3f 100644
--- a/src/test/java/edu/harvard/iq/dataverse/util/LruCacheTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/util/LruCacheTest.java
@@ -1,9 +1,10 @@
 package edu.harvard.iq.dataverse.util;
 
-import org.junit.After;
-import org.junit.Before;
-import org.junit.Test;
-import static org.junit.Assert.*;
+import org.junit.jupiter.api.AfterEach;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
+
+import static org.junit.jupiter.api.Assertions.*;
 
 /**
  *
@@ -13,12 +14,12 @@ public class LruCacheTest {
     
     LruCache<Long, String> sut;
     
-    @Before
+    @BeforeEach
     public void setUp() {
         sut = new LruCache<>();
     }
     
-    @After
+    @AfterEach
     public void tearDown() {
         sut = null;
     }
@@ -65,9 +66,9 @@ public void testLRU() {
         
     }
 
-    @Test(expected = IllegalArgumentException.class)
-    public void testSetMaxSizeWithException() {
-        sut.setMaxSize(0l);
+    @Test
+    void testSetMaxSizeWithException() {
+        assertThrows(IllegalArgumentException.class, () -> sut.setMaxSize(0l));
     }
 
     @Test
@@ -75,14 +76,14 @@ public void testInvalidateWholeCache() {
         // prepare cache
         Long key = 0l;
         String value = "x";
-        assertEquals("put value", value, sut.put(key, value));
-        assertEquals("get value", value, sut.get(key));
+        assertEquals(value, sut.put(key, value), "put value");
+        assertEquals(value, sut.get(key), "get value");
 
         // invalidate cache
         sut.invalidate();
 
         // verify invalidation
-        assertEquals("verify that value is no longer here", null, sut.get(key));
+        assertNull(sut.get(key), "verify that value is no longer here");
     }
 
     @Test
@@ -90,19 +91,19 @@ public void testInvalidateOneKeyOfCache() {
         // prepare cache
         Long key1 = 0l;
         String value1 = "x";
-        assertEquals("put value 1", value1, sut.put(key1, value1));
-        assertEquals("get value 1", value1, sut.get(key1));
+        assertEquals(value1, sut.put(key1, value1), "put value 1");
+        assertEquals(value1, sut.get(key1), "get value 1");
 
         Long key2 = 1l;
         String value2 = "y";
-        assertEquals("put value 2", value2, sut.put(key2, value2));
-        assertEquals("get value 2", value2, sut.get(key2));
+        assertEquals(value2, sut.put(key2, value2), "put value 2");
+        assertEquals(value2, sut.get(key2), "get value 2");
 
         // invalidate cache
         sut.invalidate(key1);
 
         // verify invalidation
-        assertEquals("verify that value 1 is no longer here", null, sut.get(key1));
-        assertEquals("verify that value 2 still exists", value2, sut.get(key2));
+        assertNull(sut.get(key1), "verify that value 1 is no longer here");
+        assertEquals(value2, sut.get(key2), "verify that value 2 still exists");
     }
 }
diff --git a/src/test/java/edu/harvard/iq/dataverse/util/MailUtilTest.java b/src/test/java/edu/harvard/iq/dataverse/util/MailUtilTest.java
index 76e447b3faa..bbdf5a84fc3 100644
--- a/src/test/java/edu/harvard/iq/dataverse/util/MailUtilTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/util/MailUtilTest.java
@@ -5,7 +5,7 @@
 import edu.harvard.iq.dataverse.branding.BrandingUtil;
 import edu.harvard.iq.dataverse.settings.SettingsServiceBean;
 
-import static org.junit.Assert.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertEquals;
 import org.junit.jupiter.api.Order;
 import org.junit.jupiter.api.Test;
 import org.junit.jupiter.api.TestMethodOrder;
@@ -14,7 +14,7 @@
 import org.mockito.Mock;
 import org.mockito.Mockito;
 import org.mockito.junit.jupiter.MockitoExtension;
-import org.junit.Before;
+import org.junit.jupiter.api.BeforeEach;
 
 @ExtendWith(MockitoExtension.class)
 @TestMethodOrder(OrderAnnotation.class)
@@ -27,7 +27,7 @@ public class MailUtilTest {
     @Mock
     SettingsServiceBean settingsSvc;
     
-    @Before
+    @BeforeEach
     public void setUp() {
         userNotification = new UserNotification();
 
diff --git a/src/test/java/edu/harvard/iq/dataverse/util/MockResponse.java b/src/test/java/edu/harvard/iq/dataverse/util/MockResponse.java
index 0584c6161f1..8d5b4940c14 100644
--- a/src/test/java/edu/harvard/iq/dataverse/util/MockResponse.java
+++ b/src/test/java/edu/harvard/iq/dataverse/util/MockResponse.java
@@ -7,14 +7,14 @@
 import java.util.Map;
 import java.util.Set;
 import static java.util.stream.Collectors.toList;
-import javax.ws.rs.core.EntityTag;
-import javax.ws.rs.core.GenericType;
-import javax.ws.rs.core.Link;
-import javax.ws.rs.core.MediaType;
-import javax.ws.rs.core.MultivaluedHashMap;
-import javax.ws.rs.core.MultivaluedMap;
-import javax.ws.rs.core.NewCookie;
-import javax.ws.rs.core.Response;
+import jakarta.ws.rs.core.EntityTag;
+import jakarta.ws.rs.core.GenericType;
+import jakarta.ws.rs.core.Link;
+import jakarta.ws.rs.core.MediaType;
+import jakarta.ws.rs.core.MultivaluedHashMap;
+import jakarta.ws.rs.core.MultivaluedMap;
+import jakarta.ws.rs.core.NewCookie;
+import jakarta.ws.rs.core.Response;
 
 /**
  * Simple mock class for HTTP response. This is needed as the normal response builders
diff --git a/src/test/java/edu/harvard/iq/dataverse/util/OrganizationsTest.java b/src/test/java/edu/harvard/iq/dataverse/util/OrganizationsTest.java
index 3b6cf4a7242..fb6a8e14fcd 100644
--- a/src/test/java/edu/harvard/iq/dataverse/util/OrganizationsTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/util/OrganizationsTest.java
@@ -1,10 +1,8 @@
 package edu.harvard.iq.dataverse.util;
 
-import org.junit.Test;
+import org.junit.jupiter.api.Test;
 
-import edu.harvard.iq.dataverse.util.Organizations;
-
-import static org.junit.Assert.*;
+import static org.junit.jupiter.api.Assertions.*;
 
 /**
  *
diff --git a/src/test/java/edu/harvard/iq/dataverse/util/PersonOrOrgUtilTest.java b/src/test/java/edu/harvard/iq/dataverse/util/PersonOrOrgUtilTest.java
index 0567d1e2b46..c3d9fd8fcd3 100644
--- a/src/test/java/edu/harvard/iq/dataverse/util/PersonOrOrgUtilTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/util/PersonOrOrgUtilTest.java
@@ -2,11 +2,11 @@
 
 import edu.harvard.iq.dataverse.util.json.JsonUtil;
 
-import org.junit.Ignore;
-import org.junit.Test;
-import static org.junit.Assert.*;
+import org.junit.jupiter.api.Disabled;
+import org.junit.jupiter.api.Test;
+import static org.junit.jupiter.api.Assertions.*;
 
-import javax.json.JsonObject;
+import jakarta.json.JsonObject;
 
 public class PersonOrOrgUtilTest {
 
@@ -58,7 +58,7 @@ public void testOrganizationCommaOrDash() {
             verifyIsOrganization("Geographic Data Technology, Inc. (GDT)");
         }
 
-        @Ignore
+        @Disabled
         @Test
         public void testOrganizationES() {
             //Spanish recognition is not enabled - see export/Organization.java
diff --git a/src/test/java/edu/harvard/iq/dataverse/util/StringUtilTest.java b/src/test/java/edu/harvard/iq/dataverse/util/StringUtilTest.java
index aa2d20362cb..b00b4afca7d 100644
--- a/src/test/java/edu/harvard/iq/dataverse/util/StringUtilTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/util/StringUtilTest.java
@@ -1,8 +1,8 @@
 package edu.harvard.iq.dataverse.util;
 
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertFalse;
-import static org.junit.Assert.assertTrue;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.*;
+import static org.junit.jupiter.api.Assertions.assertTrue;
 
 import java.util.Arrays;
 import java.util.Collection;
@@ -10,267 +10,134 @@
 import java.util.Optional;
 import java.util.stream.Stream;
 
-import org.junit.After;
-import org.junit.AfterClass;
-import org.junit.Before;
-import org.junit.BeforeClass;
-import org.junit.Test;
-import org.junit.experimental.runners.Enclosed;
-import org.junit.runner.RunWith;
-import org.junit.runners.Parameterized;
-import org.junit.runners.Parameterized.Parameters;
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.params.ParameterizedTest;
+import org.junit.jupiter.params.provider.Arguments;
+import org.junit.jupiter.params.provider.CsvSource;
+import org.junit.jupiter.params.provider.MethodSource;
 
-
-/**
- *
- * @author michael
- */
-@RunWith(Enclosed.class)
-public class StringUtilTest {
-
-    public StringUtilTest() {
-    }
+class StringUtilTest {
     
-    @BeforeClass
-    public static void setUpClass() {
+    /**
+     * Test of isEmpty method, of class StringUtil.
+     */
+    @ParameterizedTest
+    @CsvSource(value = {
+        "false, a",
+        "true, NULL",
+        "true, ''",
+        "true, ' '",
+        "true, \t",
+        "true, \t \t \n"
+    }, nullValues = "NULL")
+    void testIsEmpty(boolean isValid, String inputString) {
+        assertEquals( isValid, StringUtil.isEmpty(inputString) );
     }
     
-    @AfterClass
-    public static void tearDownClass() {
+    /**
+     * Test of isAlphaNumeric method, of class StringUtil.
+     */
+    @ParameterizedTest
+    @CsvSource({
+        "true,abc",
+        "true,1230",
+        "true,1230abc",
+        "true,1230abcABC",
+        "false,1230abcABC#"
+    })
+    void testIsAlphaNumeric(boolean isValid, String inputString) {
+        assertEquals(isValid, StringUtil.isAlphaNumeric(inputString) );
     }
     
-    @Before
-    public void setUp() {
+    /**
+     * Test of isAlphaNumericChar method, of class StringUtil.
+     */
+    @ParameterizedTest
+    @CsvSource({
+        "true,'a'",
+        "true,'f'",
+        "true,'z'",
+        "true,'0'",
+        "true,'1'",
+        "true,'9'",
+        "true,'A'",
+        "true,'G'",
+        "true,'Z'",
+        "false,'@'"
+    })
+    void testIsAlphaNumericChar(boolean isValid, char inputChar) {
+        assertEquals(isValid, StringUtil.isAlphaNumericChar(inputChar) );
     }
     
-    @After
-    public void tearDown() {
-    }
-
-    @RunWith(Parameterized.class)
-    public static class TestIsEmpty {
-
-        public boolean isValid;
-        public String inputString;
+    @ParameterizedTest
+    @CsvSource(value = {
+        // interface-based partitioning
+        "NULL, NULL, NULL",
+        "NULL, '', NULL",
+        "NULL, d, NULL",
         
-        public TestIsEmpty(boolean isValid, String inputString) {
-            this.isValid = isValid;
-            this.inputString = inputString;
-        }
-
-        @Parameters
-        public static Collection<Object[]> parameters() {
-            return Arrays.asList(
-                    new Object[][] { 
-                        { true, null },
-                        { true, "" },
-                        { true, " " },
-                        { true, "\t" },
-                        { true, "\t \t \n" },
-                        { false, "a" },
-                    }
-            );
-        }
-
-        /**
-         * Test of isEmpty method, of class StringUtil.
-         */
-        @Test
-        public void testIsEmpty() {
-            assertEquals( isValid, StringUtil.isEmpty(inputString) );
-        }
-    }
-
-    @RunWith(Parameterized.class)
-    public static class TestIsAlphaNumeric {
-
-        public boolean isValid;
-        public String inputString;
+        "'', NULL, ''",
+        "'', '', ''",
+        "'', abcdfg, ''",
         
-        public TestIsAlphaNumeric(boolean isValid, String inputString) {
-            this.isValid = isValid;
-            this.inputString = inputString;
-        }
-
-        @Parameters
-        public static Collection<Object[]> parameters() {
-            return Arrays.asList(
-                    new Object[][] { 
-                        { true, "abc" },
-                        { true, "1230" },
-                        { true, "1230abc" },
-                        { true, "1230abcABC" },
-                        { false, "1230abcABC#" },
-                    }
-            );
-        }
-
-        /**
-         * Test of isAlphaNumeric method, of class StringUtil.
-         */
-        @Test
-        public void testIsAlphaNumeric() {
-            assertEquals( isValid, StringUtil.isAlphaNumeric(inputString) );
-        }
-    }
-
-    @RunWith(Parameterized.class)
-    public static class TestIsAlphaNumericChar {
-
-        public boolean isValid;
-        public char inputChar;
+        "abcdfg, NULL, ''",
+        "abcdfg, '', ''",
+        "abcdfg, d, dfg",
         
-        public TestIsAlphaNumericChar(boolean isValid, char inputChar) {
-            this.isValid = isValid;
-            this.inputChar = inputChar;
-        }
-
-        @Parameters
-        public static Collection<Object[]> parameters() {
-            return Arrays.asList(
-                    new Object[][] { 
-                        { true, 'a' },
-                        { true, 'f' },
-                        { true, 'z' },
-                        { true, '0' },
-                        { true, '1' },
-                        { true, '9' },
-                        { true, 'A' },
-                        { true, 'G' },
-                        { true, 'Z' },
-                        { false, '@' },
-                    }
-            );
-        }
-
-        /**
-         * Test of isAlphaNumericChar method, of class StringUtil.
-         */
-        @Test
-        public void testIsAlphaNumericChar() {
-            assertEquals( isValid, StringUtil.isAlphaNumericChar(inputChar) );
-        }
+        // functionality-based partitioning
+        "abcdfg, NULL, ''",
+        "abcdfg, h, ''",
+        "abcdfg, b, bcdfg"
+    }, nullValues = "NULL")
+    void testSubstringIncludingLast(String str, String separator, String expectedString) {
+        assertEquals( expectedString, StringUtil.substringIncludingLast(str, separator) );
     }
 
-    @RunWith(Parameterized.class)
-    public static class TestSubstringIncludingLast {
-
-        public String str;
-        public String separator;
-        public String expectedString;
-        
-        public TestSubstringIncludingLast(String str, String separator, String expectedString) {
-            this.str = str;
-            this.separator = separator;
-            this.expectedString = expectedString;
-        }
-
-        @Parameters
-        public static Collection<Object[]> parameters() {
-            return Arrays.asList(
-                    new Object[][] { 
-                        // interface-based partitioning
-                        {null, null, null},
-                        {null, "", null},
-                        {null, "d", null},
-
-                        {"", null, ""},
-                        {"", "", ""},
-                        {"", "abcdfg", ""},
-
-                        {"abcdfg", null, ""},
-                        {"abcdfg", "", ""},
-                        {"abcdfg", "d", "dfg"},
-
-                        // functionality-based partitioning
-                        {"abcdfg" , null, ""},
-                        {"abcdfg", "h", ""},
-                        {"abcdfg", "b", "bcdfg"},
-                    }
-            );
-        }
-
-        @Test
-        public void testSubstringIncludingLast() {
-            assertEquals( expectedString, StringUtil.substringIncludingLast(str, separator) );
-        }
+    static Stream<Arguments> toOptionData() {
+        return Stream.of(
+            Arguments.of(Optional.empty(), null),
+            Arguments.of(Optional.empty(), ""),
+            Arguments.of(Optional.of("leadingWhitespace"), "    leadingWhitespace"),
+            Arguments.of(Optional.of("trailingWhiteSpace"), "trailingWhiteSpace    "),
+            Arguments.of(Optional.of("someString"), "someString"),
+            Arguments.of(Optional.of("some string with spaces"), "some string with spaces")
+        );
     }
-
-    @RunWith(Parameterized.class)
-    public static class TestToOption {
-
-        public String inputString;
-        public Optional<String> expected;
-
-        public TestToOption(String inputString, Optional<String> expected) {
-            this.inputString = inputString;
-            this.expected = expected;
-        }
-
-        @Parameters
-        public static Collection<Object[]> parameters() {
-            return Arrays.asList(
-                    new Object[][] { 
-                        {null, Optional.empty()},
-                        {"", Optional.empty()},
-                        {"    leadingWhitespace", Optional.of("leadingWhitespace")},
-                        {"trailingWhiteSpace    ", Optional.of("trailingWhiteSpace")},
-                        {"someString", Optional.of("someString")},
-                        {"some string with spaces", Optional.of("some string with spaces")}
-                    }
-            );
-        }
-
-        @Test
-        public void testToOption() {
-            assertEquals(expected, StringUtil.toOption(inputString));
-        }
+    
+    @ParameterizedTest
+    @MethodSource("toOptionData")
+    void testToOption(Optional<String> expected, String inputString) {
+        assertEquals(expected, StringUtil.toOption(inputString));
     }
-
-    @RunWith(Parameterized.class)
-    public static class TestSanitizeFileDirectory {
-
-        public String inputString;
-        public String expected;
-        public boolean aggressively;
-
-        public TestSanitizeFileDirectory(String inputString, String expected, boolean aggressively) {
-            this.inputString = inputString;
-            this.expected = expected;
-            this.aggressively = aggressively;
-        }
-
-        @Parameters
-        public static Collection<Object[]> parameters() {
-            return Arrays.asList(
-                    new Object[][] { 
-                        {"some\\path\\to\\a\\directory", "some/path/to/a/directory", false},
-                        {"some\\//path\\//to\\//a\\//directory", "some/path/to/a/directory", false},
-                        // starts with / or - or . or whitepsace
-                        {"/some/path/to/a/directory", "some/path/to/a/directory", false},
-                        {"-some/path/to/a/directory", "some/path/to/a/directory", false},
-                        {".some/path/to/a/directory", "some/path/to/a/directory", false},
-                        {" some/path/to/a/directory", "some/path/to/a/directory", false},
-                        // ends with / or - or . or whitepsace
-                        {"some/path/to/a/directory/", "some/path/to/a/directory", false},
-                        {"some/path/to/a/directory-", "some/path/to/a/directory", false},
-                        {"some/path/to/a/directory.", "some/path/to/a/directory", false},
-                        {"some/path/to/a/directory ", "some/path/to/a/directory", false},
-
-                        {"", null, false},
-                        {"/", null, false},
-
-                        // aggressively
-                        {"some/path/to/a/dire{`~}ctory", "some/path/to/a/dire.ctory", true},
-                        {"some/path/to/a/directory\\.\\.", "some/path/to/a/directory", true},
-                    }
-            );
-        }
-
-        @Test
-        public void testSanitizeFileDirectory() {
-            assertEquals(expected, StringUtil.sanitizeFileDirectory(inputString, aggressively));
-        }
+    
+    static Stream<Arguments> sanitizeData() {
+        return Stream.of(
+            Arguments.of("some\\path\\to\\a\\directory", "some/path/to/a/directory", false),
+            Arguments.of("some\\//path\\//to\\//a\\//directory", "some/path/to/a/directory", false),
+            // starts with / or - or . or whitepsace
+            Arguments.of("/some/path/to/a/directory", "some/path/to/a/directory", false),
+            Arguments.of("-some/path/to/a/directory", "some/path/to/a/directory", false),
+            Arguments.of(".some/path/to/a/directory", "some/path/to/a/directory", false),
+            Arguments.of(" some/path/to/a/directory", "some/path/to/a/directory", false),
+            // ends with / or - or . or whitepsace
+            Arguments.of("some/path/to/a/directory/", "some/path/to/a/directory", false),
+            Arguments.of("some/path/to/a/directory-", "some/path/to/a/directory", false),
+            Arguments.of("some/path/to/a/directory.", "some/path/to/a/directory", false),
+            Arguments.of("some/path/to/a/directory ", "some/path/to/a/directory", false),
+            
+            Arguments.of("", null, false),
+            Arguments.of("/", null, false),
+            
+            // aggressively
+            Arguments.of("some/path/to/a/dire{`~}ctory", "some/path/to/a/dire.ctory", true),
+            Arguments.of("some/path/to/a/directory\\.\\.", "some/path/to/a/directory", true)
+        );
+    }
+    
+    @ParameterizedTest
+    @MethodSource("sanitizeData")
+    void testSanitizeFileDirectory(String inputString, String expected, boolean aggressively) {
+        assertEquals(expected, StringUtil.sanitizeFileDirectory(inputString, aggressively));
     }
 
     public static class StringUtilNoParamTest{
diff --git a/src/test/java/edu/harvard/iq/dataverse/util/UrlSignerUtilTest.java b/src/test/java/edu/harvard/iq/dataverse/util/UrlSignerUtilTest.java
index 2b9d507758f..09739b67023 100644
--- a/src/test/java/edu/harvard/iq/dataverse/util/UrlSignerUtilTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/util/UrlSignerUtilTest.java
@@ -1,12 +1,12 @@
 package edu.harvard.iq.dataverse.util;
 
-import static org.junit.Assert.assertFalse;
-import static org.junit.Assert.assertTrue;
+import static org.junit.jupiter.api.Assertions.*;
+import static org.junit.jupiter.api.Assertions.assertTrue;
 
 import java.util.logging.Level;
 import java.util.logging.Logger;
 
-import org.junit.Test;
+import org.junit.jupiter.api.Test;
 
 public class UrlSignerUtilTest {
 
diff --git a/src/test/java/edu/harvard/iq/dataverse/util/bagit/BagChecksumTypeTest.java b/src/test/java/edu/harvard/iq/dataverse/util/bagit/BagChecksumTypeTest.java
index 2dfaf2b2371..905f8609276 100644
--- a/src/test/java/edu/harvard/iq/dataverse/util/bagit/BagChecksumTypeTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/util/bagit/BagChecksumTypeTest.java
@@ -3,7 +3,7 @@
 import org.apache.commons.io.IOUtils;
 import org.hamcrest.MatcherAssert;
 import org.hamcrest.Matchers;
-import org.junit.Test;
+import org.junit.jupiter.api.Test;
 
 import java.util.Map;
 
diff --git a/src/test/java/edu/harvard/iq/dataverse/util/bagit/BagValidationTest.java b/src/test/java/edu/harvard/iq/dataverse/util/bagit/BagValidationTest.java
index 4f7a22833e7..87b3382120d 100644
--- a/src/test/java/edu/harvard/iq/dataverse/util/bagit/BagValidationTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/util/bagit/BagValidationTest.java
@@ -3,7 +3,7 @@
 import edu.harvard.iq.dataverse.util.bagit.BagValidation.FileValidationResult;
 import org.hamcrest.MatcherAssert;
 import org.hamcrest.Matchers;
-import org.junit.Test;
+import org.junit.jupiter.api.Test;
 
 import java.nio.file.Path;
 import java.util.Optional;
diff --git a/src/test/java/edu/harvard/iq/dataverse/util/bagit/BagValidatorTest.java b/src/test/java/edu/harvard/iq/dataverse/util/bagit/BagValidatorTest.java
index 24c45a8e75c..cbdb4b66e83 100644
--- a/src/test/java/edu/harvard/iq/dataverse/util/bagit/BagValidatorTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/util/bagit/BagValidatorTest.java
@@ -8,8 +8,8 @@
 import org.apache.commons.lang3.RandomStringUtils;
 import org.hamcrest.MatcherAssert;
 import org.hamcrest.Matchers;
-import org.junit.Before;
-import org.junit.Test;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
 import org.mockito.Mockito;
 
 import java.nio.file.Path;
@@ -33,7 +33,7 @@ public class BagValidatorTest {
     private ManifestReader manifestReader;
     private BagValidator target;
 
-    @Before
+    @BeforeEach
     public void beforeEachTest() {
         manifestReader = Mockito.mock(ManifestReader.class);
         target = Mockito.spy(new BagValidator(manifestReader));
@@ -46,7 +46,7 @@ public void hasBagItPackage_should_return_false_when_bagit_file_not_found() {
         boolean result = target.hasBagItPackage(fileDataProvider);
 
         MatcherAssert.assertThat(result, Matchers.is(false));
-        Mockito.verifyZeroInteractions(manifestReader);
+        Mockito.verifyNoInteractions(manifestReader);
     }
 
     @Test
@@ -95,7 +95,7 @@ public void validateChecksums_should_return_error_when_no_bagit_file_in_data_pro
         MatcherAssert.assertThat(result.getErrorMessage().isEmpty(), Matchers.is(false));
         Mockito.verify(target).getMessage(Mockito.eq("bagit.validation.bag.file.not.found"), Mockito.any());
 
-        Mockito.verifyZeroInteractions(manifestReader);
+        Mockito.verifyNoInteractions(manifestReader);
     }
 
     @Test
@@ -119,7 +119,7 @@ public void validateChecksums_should_return_error_when_manifest_reader_returns_e
 
         MatcherAssert.assertThat(result.success(), Matchers.is(false));
         MatcherAssert.assertThat(result.getErrorMessage().isEmpty(), Matchers.is(false));
-        Mockito.verify(target).getMessage(Mockito.eq("bagit.validation.manifest.not.supported"), Mockito.any());
+        Mockito.verify(target).getMessage(Mockito.eq("bagit.validation.manifest.not.supported"), Mockito.any(Object[].class));
 
         Mockito.verify(manifestReader).getManifestChecksums(fileDataProvider, expectedBagRoot);
     }
@@ -140,7 +140,7 @@ public void validateChecksums_should_return_error_when_data_provider_do_not_have
         for(Path filePath: checksums.getFileChecksums().keySet()) {
             MatcherAssert.assertThat(result.getFileResults().get(filePath).isError(), Matchers.is(true));
         }
-        Mockito.verify(target, Mockito.times(checksums.getFileChecksums().size())).getMessage(Mockito.eq("bagit.validation.file.not.found"), Mockito.any());
+        Mockito.verify(target, Mockito.times(checksums.getFileChecksums().size())).getMessage(Mockito.eq("bagit.validation.file.not.found"), Mockito.any(Object[].class));
 
         Mockito.verify(manifestReader).getManifestChecksums(fileDataProvider, expectedBagRoot);
         Mockito.verify(fileDataProvider).getFilePaths();
diff --git a/src/test/java/edu/harvard/iq/dataverse/util/bagit/FileChecksumValidationJobTest.java b/src/test/java/edu/harvard/iq/dataverse/util/bagit/FileChecksumValidationJobTest.java
index 63a2650a5ef..08e853a58df 100644
--- a/src/test/java/edu/harvard/iq/dataverse/util/bagit/FileChecksumValidationJobTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/util/bagit/FileChecksumValidationJobTest.java
@@ -5,7 +5,7 @@
 import org.apache.commons.io.IOUtils;
 import org.hamcrest.MatcherAssert;
 import org.hamcrest.Matchers;
-import org.junit.Test;
+import org.junit.jupiter.api.Test;
 
 import java.io.IOException;
 import java.io.InputStream;
diff --git a/src/test/java/edu/harvard/iq/dataverse/util/bagit/ManifestReaderTest.java b/src/test/java/edu/harvard/iq/dataverse/util/bagit/ManifestReaderTest.java
index e31e2fcc87b..3e84fda92b5 100644
--- a/src/test/java/edu/harvard/iq/dataverse/util/bagit/ManifestReaderTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/util/bagit/ManifestReaderTest.java
@@ -5,7 +5,7 @@
 import edu.harvard.iq.dataverse.util.bagit.data.FileDataProviderFactory;
 import org.hamcrest.MatcherAssert;
 import org.hamcrest.Matchers;
-import org.junit.Test;
+import org.junit.jupiter.api.Test;
 import org.mockito.Mockito;
 
 import java.nio.file.Path;
diff --git a/src/test/java/edu/harvard/iq/dataverse/util/bagit/data/DataFileDataProviderTest.java b/src/test/java/edu/harvard/iq/dataverse/util/bagit/data/DataFileDataProviderTest.java
index 50ebaff2b7c..1d5e20a985e 100644
--- a/src/test/java/edu/harvard/iq/dataverse/util/bagit/data/DataFileDataProviderTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/util/bagit/data/DataFileDataProviderTest.java
@@ -3,7 +3,7 @@
 import edu.harvard.iq.dataverse.DataFile;
 import org.hamcrest.MatcherAssert;
 import org.hamcrest.Matchers;
-import org.junit.Test;
+import org.junit.jupiter.api.Test;
 import org.mockito.Mockito;
 
 import java.nio.file.Path;
diff --git a/src/test/java/edu/harvard/iq/dataverse/util/bagit/data/FileDataProviderFactoryTest.java b/src/test/java/edu/harvard/iq/dataverse/util/bagit/data/FileDataProviderFactoryTest.java
index c5eadb09811..f43a0c78284 100644
--- a/src/test/java/edu/harvard/iq/dataverse/util/bagit/data/FileDataProviderFactoryTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/util/bagit/data/FileDataProviderFactoryTest.java
@@ -2,7 +2,7 @@
 
 import org.hamcrest.MatcherAssert;
 import org.hamcrest.Matchers;
-import org.junit.Test;
+import org.junit.jupiter.api.Test;
 
 import java.io.IOException;
 import java.nio.file.Path;
diff --git a/src/test/java/edu/harvard/iq/dataverse/util/bagit/data/FolderDataProviderTest.java b/src/test/java/edu/harvard/iq/dataverse/util/bagit/data/FolderDataProviderTest.java
index 7e892180bed..e403b32b1b3 100644
--- a/src/test/java/edu/harvard/iq/dataverse/util/bagit/data/FolderDataProviderTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/util/bagit/data/FolderDataProviderTest.java
@@ -2,7 +2,7 @@
 
 import org.hamcrest.MatcherAssert;
 import org.hamcrest.Matchers;
-import org.junit.Test;
+import org.junit.jupiter.api.Test;
 import org.mockito.Mockito;
 
 import java.io.IOException;
diff --git a/src/test/java/edu/harvard/iq/dataverse/util/bagit/data/ZipFileDataProviderTest.java b/src/test/java/edu/harvard/iq/dataverse/util/bagit/data/ZipFileDataProviderTest.java
index 084fb6ed50f..67bd13d61e4 100644
--- a/src/test/java/edu/harvard/iq/dataverse/util/bagit/data/ZipFileDataProviderTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/util/bagit/data/ZipFileDataProviderTest.java
@@ -5,7 +5,7 @@
 import org.apache.commons.compress.archivers.zip.ZipFile;
 import org.hamcrest.MatcherAssert;
 import org.hamcrest.Matchers;
-import org.junit.Test;
+import org.junit.jupiter.api.Test;
 import org.mockito.Mockito;
 
 import java.io.InputStream;
diff --git a/src/test/java/edu/harvard/iq/dataverse/util/file/BagItFileHandlerFactoryTest.java b/src/test/java/edu/harvard/iq/dataverse/util/file/BagItFileHandlerFactoryTest.java
index d3f1dbcf805..8b047443745 100644
--- a/src/test/java/edu/harvard/iq/dataverse/util/file/BagItFileHandlerFactoryTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/util/file/BagItFileHandlerFactoryTest.java
@@ -4,18 +4,18 @@
 import edu.harvard.iq.dataverse.util.bagit.BagValidator;
 import org.hamcrest.MatcherAssert;
 import org.hamcrest.Matchers;
-import org.junit.Test;
-import org.junit.runner.RunWith;
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.extension.ExtendWith;
 import org.mockito.InjectMocks;
 import org.mockito.Mock;
 import org.mockito.Mockito;
-import org.mockito.junit.MockitoJUnitRunner;
+import org.mockito.junit.jupiter.MockitoExtension;
 
 /**
  *
  * @author adaybujeda
  */
-@RunWith(MockitoJUnitRunner.class)
+@ExtendWith(MockitoExtension.class)
 public class BagItFileHandlerFactoryTest {
 
     @Mock
diff --git a/src/test/java/edu/harvard/iq/dataverse/util/file/BagItFileHandlerPostProcessorTest.java b/src/test/java/edu/harvard/iq/dataverse/util/file/BagItFileHandlerPostProcessorTest.java
index f8c7565af7c..064fa8b440f 100644
--- a/src/test/java/edu/harvard/iq/dataverse/util/file/BagItFileHandlerPostProcessorTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/util/file/BagItFileHandlerPostProcessorTest.java
@@ -5,7 +5,7 @@
 import edu.harvard.iq.dataverse.mocks.MocksFactory;
 import org.hamcrest.MatcherAssert;
 import org.hamcrest.Matchers;
-import org.junit.Test;
+import org.junit.jupiter.api.Test;
 
 import java.nio.file.Path;
 import java.util.ArrayList;
diff --git a/src/test/java/edu/harvard/iq/dataverse/util/file/BagItFileHandlerTest.java b/src/test/java/edu/harvard/iq/dataverse/util/file/BagItFileHandlerTest.java
index b3687285f6c..e8dff33db80 100644
--- a/src/test/java/edu/harvard/iq/dataverse/util/file/BagItFileHandlerTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/util/file/BagItFileHandlerTest.java
@@ -14,8 +14,8 @@
 import edu.harvard.iq.dataverse.util.bagit.data.StringDataProvider;
 import org.hamcrest.MatcherAssert;
 import org.hamcrest.Matchers;
-import org.junit.Before;
-import org.junit.Test;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
 import org.mockito.Mockito;
 
 import java.io.File;
@@ -47,7 +47,7 @@ public class BagItFileHandlerTest {
 
     private BagItFileHandler target;
 
-    @Before
+    @BeforeEach
     public void beforeEachTest() {
         FILE_UTIL = Mockito.mock(FileUtilWrapper.class, Mockito.RETURNS_DEEP_STUBS);
         SYSTEM_CONFIG = Mockito.mock(SystemConfig.class, Mockito.RETURNS_DEEP_STUBS);
@@ -99,7 +99,7 @@ public void handleBagItPackage_should_return_error_when_no_files_in_data_provide
         createDataFileResultAsserts(result);
 
         handleBagItPackageAsserts(fileDataProvider);
-        Mockito.verifyZeroInteractions(postProcessor);
+        Mockito.verifyNoInteractions(postProcessor);
     }
 
     @Test
@@ -231,7 +231,7 @@ public void handleBagItPackage_should_return_error_when_FileExceedsMaxSizeExcept
 
         handleBagItPackageAsserts(dataProviderSpy);
         createDataFileAsserts(Arrays.asList(Path.of(bagEntry)), 2);
-        Mockito.verifyZeroInteractions(postProcessor);
+        Mockito.verifyNoInteractions(postProcessor);
     }
 
     @Test
@@ -249,7 +249,7 @@ public void handleBagItPackage_should_return_error_when_the_maximum_number_of_fi
         MatcherAssert.assertThat(result.getErrors().size(), Matchers.is(1));
 
         handleBagItPackageAsserts(dataProviderSpy);
-        Mockito.verifyZeroInteractions(postProcessor);
+        Mockito.verifyNoInteractions(postProcessor);
     }
 
     @Test
@@ -266,7 +266,7 @@ public void handleBagItPackage_should_return_error_when_bag_validation_fails() t
 
         handleBagItPackageAsserts(dataProviderSpy);
         createDataFileAsserts(dataProviderWithDataFiles.dataProvider.getFilePaths());
-        Mockito.verifyZeroInteractions(postProcessor);
+        Mockito.verifyNoInteractions(postProcessor);
     }
 
     private void createDataFileResultAsserts(CreateDataFileResult result) {
diff --git a/src/test/java/edu/harvard/iq/dataverse/util/file/CreateDataFileResultTest.java b/src/test/java/edu/harvard/iq/dataverse/util/file/CreateDataFileResultTest.java
index 7c392418de9..59d3ac15f11 100644
--- a/src/test/java/edu/harvard/iq/dataverse/util/file/CreateDataFileResultTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/util/file/CreateDataFileResultTest.java
@@ -3,7 +3,7 @@
 import edu.harvard.iq.dataverse.DataFile;
 import org.hamcrest.MatcherAssert;
 import org.hamcrest.Matchers;
-import org.junit.Test;
+import org.junit.jupiter.api.Test;
 
 import java.util.Arrays;
 import java.util.Collections;
diff --git a/src/test/java/edu/harvard/iq/dataverse/util/json/BriefJsonPrinterTest.java b/src/test/java/edu/harvard/iq/dataverse/util/json/BriefJsonPrinterTest.java
index dc36b197c55..b426f84a464 100644
--- a/src/test/java/edu/harvard/iq/dataverse/util/json/BriefJsonPrinterTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/util/json/BriefJsonPrinterTest.java
@@ -6,15 +6,14 @@
 import edu.harvard.iq.dataverse.DatasetFieldType;
 import edu.harvard.iq.dataverse.DatasetVersion;
 import edu.harvard.iq.dataverse.MetadataBlock;
-import edu.harvard.iq.dataverse.authorization.providers.builtin.BuiltinUser;
 import edu.harvard.iq.dataverse.mocks.MocksFactory;
 import edu.harvard.iq.dataverse.workflow.Workflow;
-import javax.json.JsonObject;
-import org.junit.Test;
+import jakarta.json.JsonObject;
+import org.junit.jupiter.api.Test;
 
 import java.util.Collections;
 
-import static org.junit.Assert.*;
+import static org.junit.jupiter.api.Assertions.assertEquals;
 
 /**
  *
diff --git a/src/test/java/edu/harvard/iq/dataverse/util/json/DatasetVersionDTOTest.java b/src/test/java/edu/harvard/iq/dataverse/util/json/DatasetVersionDTOTest.java
index c7077f936a0..7dd5ad9da8d 100644
--- a/src/test/java/edu/harvard/iq/dataverse/util/json/DatasetVersionDTOTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/util/json/DatasetVersionDTOTest.java
@@ -17,12 +17,12 @@
 import java.util.HashSet;
 import java.util.List;
 import java.util.Scanner;
-import junit.framework.Assert;
-import org.junit.After;
-import org.junit.AfterClass;
-import org.junit.Before;
-import org.junit.BeforeClass;
-import org.junit.Test;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import org.junit.jupiter.api.AfterAll;
+import org.junit.jupiter.api.AfterEach;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
 
 /**
  *
@@ -34,19 +34,19 @@ public class DatasetVersionDTOTest {
     public DatasetVersionDTOTest() {
     }
     
-    @BeforeClass
+    @BeforeAll
     public static void setUpClass() {
     }
     
-    @AfterClass
+    @AfterAll
     public static void tearDownClass() {
     }
     
-    @Before
+    @BeforeEach
     public void setUp() {
        
     }
-    @After
+    @AfterEach
     public void tearDown() {
     }
     
@@ -89,7 +89,7 @@ public void testReadDataSet() {
             JsonElement expected = gson.toJsonTree(expectedDTO, FieldDTO.class);
             JsonElement result = gson.toJsonTree(authorDTO);
             
-            Assert.assertEquals(expected, result);
+            assertEquals(expected, result);
             
         } catch (IOException e) {
             e.printStackTrace();
diff --git a/src/test/java/edu/harvard/iq/dataverse/util/json/JsonParserTest.java b/src/test/java/edu/harvard/iq/dataverse/util/json/JsonParserTest.java
index 579711d63c3..972fc9c41cd 100644
--- a/src/test/java/edu/harvard/iq/dataverse/util/json/JsonParserTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/util/json/JsonParserTest.java
@@ -27,19 +27,20 @@
 import edu.harvard.iq.dataverse.license.LicenseServiceBean;
 import edu.harvard.iq.dataverse.mocks.MockDatasetFieldSvc;
 import edu.harvard.iq.dataverse.settings.SettingsServiceBean;
-import org.junit.AfterClass;
-import org.junit.Before;
-import org.junit.BeforeClass;
-import org.junit.Test;
+import org.junit.jupiter.api.AfterAll;
+import org.junit.jupiter.api.Assumptions;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.Test;
 import org.mockito.Mockito;
 
-import javax.json.Json;
-import javax.json.JsonArray;
-import javax.json.JsonArrayBuilder;
-import javax.json.JsonObject;
-import javax.json.JsonObjectBuilder;
-import javax.json.JsonReader;
-import javax.json.JsonValue;
+import jakarta.json.Json;
+import jakarta.json.JsonArray;
+import jakarta.json.JsonArrayBuilder;
+import jakarta.json.JsonObject;
+import jakarta.json.JsonObjectBuilder;
+import jakarta.json.JsonReader;
+import jakarta.json.JsonValue;
 import java.io.FileReader;
 import java.io.IOException;
 import java.io.InputStream;
@@ -58,9 +59,9 @@
 import java.util.Set;
 import java.util.TimeZone;
 
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertFalse;
-import static org.junit.Assert.assertTrue;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.*;
+import static org.junit.jupiter.api.Assertions.assertTrue;
 
 /**
  *
@@ -81,15 +82,15 @@ public class JsonParserTest {
     public JsonParserTest() {
     }
     
-    @BeforeClass
+    @BeforeAll
     public static void setUpClass() {
     }
     
-    @AfterClass
+    @AfterAll
     public static void tearDownClass() {
     }
     
-    @Before
+    @BeforeEach
     public void setUp() {
         datasetFieldTypeSvc = new MockDatasetFieldSvc();
         datasetFieldTypeSvc.setMetadataBlock("citation");
@@ -184,8 +185,8 @@ public void testControlledVocalRepeatsRoundTrip() throws JsonParseException {
     }
     
     
-    @Test(expected=JsonParseException.class)
-     public void testChildValidation() throws JsonParseException {
+    @Test
+    void testChildValidation() {
         // This Json String is a compound field that contains the wrong
         // fieldType as a child ("description" is not a child of "coordinate").
         // It should throw a JsonParseException when it encounters the invalid child.
@@ -210,8 +211,8 @@ public void testChildValidation() throws JsonParseException {
         JsonReader jsonReader = Json.createReader(new StringReader(text));
         JsonObject obj = jsonReader.readObject();
 
-        sut.parseField(obj);
-       }
+        assertThrows(JsonParseException.class, () -> sut.parseField(obj));
+    }
     
     
     @Test
@@ -333,12 +334,12 @@ public void testParseMinimalDataverse() throws JsonParseException {
      * @throws JsonParseException if all goes well - this is expected.
      * @throws IOException when test file IO goes wrong - this is bad.
      */
-    @Test(expected = JsonParseException.class)
-    public void testParseNoAliasDataverse() throws JsonParseException, IOException {
+    @Test
+    void testParseNoAliasDataverse() throws IOException {
         JsonObject dvJson;
         try (InputStream jsonFile = ClassLoader.getSystemResourceAsStream("json/no-alias-dataverse.json")) {
             dvJson = Json.createReader(jsonFile).readObject();
-            Dataverse actual = sut.parseDataverse(dvJson);
+            assertThrows(JsonParseException.class, () -> sut.parseDataverse(dvJson));
         }
     }
     
@@ -347,12 +348,12 @@ public void testParseNoAliasDataverse() throws JsonParseException, IOException {
      * @throws JsonParseException if all goes well - this is expected.
      * @throws IOException when test file IO goes wrong - this is bad.
      */
-    @Test(expected = JsonParseException.class)
-    public void testParseNoNameDataverse() throws JsonParseException, IOException {
+    @Test
+    void testParseNoNameDataverse() throws IOException {
         JsonObject dvJson;
         try (InputStream jsonFile = ClassLoader.getSystemResourceAsStream("json/no-name-dataverse.json")) {
             dvJson = Json.createReader(jsonFile).readObject();
-            Dataverse actual = sut.parseDataverse(dvJson);
+            assertThrows(JsonParseException.class, () -> sut.parseDataverse(dvJson));
         }
     }
     
@@ -362,12 +363,12 @@ public void testParseNoNameDataverse() throws JsonParseException, IOException {
      * @throws JsonParseException if all goes well - this is expected.
      * @throws IOException when test file IO goes wrong - this is bad.
      */
-    @Test(expected = JsonParseException.class)
-    public void testParseNoContactEmailsDataverse() throws JsonParseException, IOException {
+    @Test
+    void testParseNoContactEmailsDataverse() throws IOException {
         JsonObject dvJson;
         try (InputStream jsonFile = ClassLoader.getSystemResourceAsStream("json/no-contacts-dataverse.json")) {
             dvJson = Json.createReader(jsonFile).readObject();
-            Dataverse actual = sut.parseDataverse(dvJson);
+            assertThrows(JsonParseException.class, () -> sut.parseDataverse(dvJson));
         }
     }
 
@@ -420,16 +421,14 @@ public void testDateTimeRoundtrip() throws ParseException {
      * Expect an exception when the dataset JSON is empty.
      * @throws JsonParseException when the test is broken
      */
-    @Test(expected = NullPointerException.class)
-    public void testParseEmptyDataset() throws JsonParseException {
+    @Test
+    void testParseEmptyDataset() throws JsonParseException {
         JsonObject dsJson;
         try (InputStream jsonFile = ClassLoader.getSystemResourceAsStream("json/empty-dataset.json")) {
             InputStreamReader reader = new InputStreamReader(jsonFile, "UTF-8");
             dsJson = Json.createReader(reader).readObject();
             System.out.println(dsJson != null);
-            Dataset actual = sut.parseDataset(dsJson);
-            assertEquals("10.5072", actual.getAuthority());
-            assertEquals("doi", actual.getProtocol());
+            assertThrows(NullPointerException.class, () -> sut.parseDataset(dsJson));
         } catch (IOException ioe) {
             throw new JsonParseException("Couldn't read test file", ioe);
         }
@@ -443,13 +442,13 @@ public void testParseEmptyDataset() throws JsonParseException {
      * @throws IOException when test file IO goes wrong - this is bad.
      */
     @Test
-    public void testParseOvercompleteDatasetVersion() throws JsonParseException, IOException {
+    void testParseOvercompleteDatasetVersion() throws IOException {
         JsonObject dsJson;
         try (InputStream jsonFile = ClassLoader.getSystemResourceAsStream("json/complete-dataset-version.json")) {
             InputStreamReader reader = new InputStreamReader(jsonFile, "UTF-8");
             dsJson = Json.createReader(reader).readObject();
-            System.out.println(dsJson != null);
-            DatasetVersion actual = sut.parseDatasetVersion(dsJson);
+            Assumptions.assumeTrue(dsJson != null);
+            assertDoesNotThrow(() -> sut.parseDatasetVersion(dsJson));
         }
     }
     
@@ -566,31 +565,31 @@ public void testValidRegexMailDomainGroup() throws JsonParseException {
         assertEquals(test.hashCode(), parsed.hashCode());
     }
     
-    @Test(expected = JsonParseException.class)
-    public void testMailDomainGroupMissingName() throws JsonParseException {
+    @Test
+    void testMailDomainGroupMissingName() {
         // given
         String noname = "{ \"id\": 1, \"alias\": \"test\", \"domains\": [] }";
         JsonObject obj = Json.createReader(new StringReader(noname)).readObject();
         // when && then
-        MailDomainGroup parsed = new JsonParser().parseMailDomainGroup(obj);
+        assertThrows(JsonParseException.class, () -> new JsonParser().parseMailDomainGroup(obj));
     }
     
-    @Test(expected = JsonParseException.class)
-    public void testMailDomainGroupMissingDomains() throws JsonParseException {
+    @Test
+    void testMailDomainGroupMissingDomains() {
         // given
         String noname = "{ \"name\": \"test\", \"alias\": \"test\" }";
         JsonObject obj = Json.createReader(new StringReader(noname)).readObject();
         // when && then
-        MailDomainGroup parsed = new JsonParser().parseMailDomainGroup(obj);
+        assertThrows(JsonParseException.class, () -> new JsonParser().parseMailDomainGroup(obj));
     }
     
-    @Test(expected = JsonParseException.class)
-    public void testMailDomainGroupNotEnabledRegexDomains() throws JsonParseException {
+    @Test
+    void testMailDomainGroupNotEnabledRegexDomains() {
         // given
         String regexNotEnabled = "{ \"id\": 1, \"alias\": \"test\", \"domains\": [\"^foobar\\\\.com\"] }";
         JsonObject obj = Json.createReader(new StringReader(regexNotEnabled)).readObject();
         // when && then
-        MailDomainGroup parsed = new JsonParser().parseMailDomainGroup(obj);
+        assertThrows(JsonParseException.class, () -> new JsonParser().parseMailDomainGroup(obj));
     }
 
     @Test
@@ -712,8 +711,8 @@ public void testEnum() throws JsonParseException {
             arr.add(entry.name());
         }
         Set<Type> typesSet = new HashSet<>(JsonParser.parseEnumsFromArray(arr.build(), Type.class));
-        assertTrue("Set contains two elements", typesSet.size() == 2);
-        assertTrue("Set contains REVOKEROLE", typesSet.contains(Type.REVOKEROLE));
-        assertTrue("Set contains ASSIGNROLE", typesSet.contains(Type.ASSIGNROLE));
+        assertEquals(2, typesSet.size(), "Set contains two elements");
+        assertTrue(typesSet.contains(Type.REVOKEROLE), "Set contains REVOKEROLE");
+        assertTrue(typesSet.contains(Type.ASSIGNROLE), "Set contains ASSIGNROLE");
     }
 }
diff --git a/src/test/java/edu/harvard/iq/dataverse/util/json/JsonPrinterTest.java b/src/test/java/edu/harvard/iq/dataverse/util/json/JsonPrinterTest.java
index 8697b5aa354..1d054040e84 100644
--- a/src/test/java/edu/harvard/iq/dataverse/util/json/JsonPrinterTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/util/json/JsonPrinterTest.java
@@ -19,16 +19,17 @@
 import java.util.List;
 import java.util.Set;
 
-import javax.json.JsonArrayBuilder;
-import javax.json.JsonObject;
-import javax.json.JsonObjectBuilder;
-import javax.json.JsonString;
+import jakarta.json.JsonArrayBuilder;
+import jakarta.json.JsonObject;
+import jakarta.json.JsonObjectBuilder;
+import jakarta.json.JsonString;
 
 import edu.harvard.iq.dataverse.util.BundleUtil;
-import org.junit.Test;
-import org.junit.Before;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertNotNull;
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.BeforeEach;
+
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.*;
 import static org.junit.jupiter.api.Assertions.assertTrue;
 import static org.junit.jupiter.api.Assertions.assertFalse;
 
@@ -36,7 +37,7 @@ public class JsonPrinterTest {
 
     MockDatasetFieldSvc datasetFieldTypeSvc = null;
 
-    @Before
+    @BeforeEach
     public void setUp() {
         datasetFieldTypeSvc = new MockDatasetFieldSvc();
         datasetFieldTypeSvc.setMetadataBlock("citation");
diff --git a/src/test/java/edu/harvard/iq/dataverse/util/json/JsonUtilTest.java b/src/test/java/edu/harvard/iq/dataverse/util/json/JsonUtilTest.java
index 847fbfc9cba..725862db7ba 100644
--- a/src/test/java/edu/harvard/iq/dataverse/util/json/JsonUtilTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/util/json/JsonUtilTest.java
@@ -1,7 +1,7 @@
 package edu.harvard.iq.dataverse.util.json;
 
-import static org.junit.Assert.assertEquals;
-import org.junit.Test;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import org.junit.jupiter.api.Test;
 
 public class JsonUtilTest {
 
diff --git a/src/test/java/edu/harvard/iq/dataverse/util/shapefile/ShapefileHandlerTest.java b/src/test/java/edu/harvard/iq/dataverse/util/shapefile/ShapefileHandlerTest.java
index 8aa10c9667f..b93028b6365 100644
--- a/src/test/java/edu/harvard/iq/dataverse/util/shapefile/ShapefileHandlerTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/util/shapefile/ShapefileHandlerTest.java
@@ -11,16 +11,18 @@
 import edu.harvard.iq.dataverse.util.ShapefileHandler;
 import static edu.harvard.iq.dataverse.util.ShapefileHandler.SHP_XML_EXTENSION;
 
+import java.nio.file.Files;
+import java.nio.file.Path;
 import java.util.Arrays;
 import java.util.List;
 import java.io.File;
 
-import org.junit.Rule;
-import org.junit.Test;
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.io.TempDir;
 
-
-import org.junit.rules.TemporaryFolder;
-import static org.junit.Assert.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertFalse;
+import static org.junit.jupiter.api.Assertions.assertTrue;
 
 import java.io.FileInputStream;
 import java.io.FileNotFoundException;
@@ -40,10 +42,8 @@
  */
 public class ShapefileHandlerTest {
     
-        
-    @Rule
-    public TemporaryFolder tempFolder = new TemporaryFolder();
-   
+    @TempDir
+    Path tempFolder;
     
     public void msg(String s){
             System.out.println(s);
@@ -61,28 +61,9 @@ private File createBlankFile(String filename) throws IOException {
         if (filename == null){
             return null;
         }
-        File aFile = this.tempFolder.newFile(filename);
-        //  FileUtils.writeStringToFile(tempFile, "hello world");
-
-        aFile.createNewFile();
-        return aFile;
+        return Files.createFile(tempFolder.resolve(filename)).toFile();
     }
     
-
-     
-     
-    private void showFilesInFolder(String m, String folder_name) throws IOException{
-        msgt(m);
-        File folder = new File(folder_name);
-        for (File f : folder.listFiles() ){
-            this.msg("fname: " + f.getCanonicalPath());
-        }
-    } 
-         
-    private void showFilesInTempFolder(String m) throws IOException{
-        this.showFilesInFolder(m, this.tempFolder.getRoot().getAbsolutePath());
-    } 
-    
     private FileInputStream createZipReturnFilestream(List<String> file_names, String zipfile_name) throws IOException{
         
         File zip_file_obj = this.createAndZipFiles(file_names, zipfile_name);
@@ -116,8 +97,8 @@ private File createAndZipFiles(List<String> file_names, String zipfile_name) thr
            //msg("File created: " + file_obj.getName());           
         }
         
-        File zip_file_obj = this.tempFolder.newFile(zipfile_name);
-        ZipOutputStream zip_stream = new ZipOutputStream(new FileOutputStream(zip_file_obj));
+        Path zip_file_obj = this.tempFolder.resolve(zipfile_name);
+        ZipOutputStream zip_stream = new ZipOutputStream(new FileOutputStream(zip_file_obj.toFile()));
 
         // Iterate through File objects and add them to the ZipOutputStream
         for (File file_obj : fileCollection) {
@@ -131,7 +112,7 @@ private File createAndZipFiles(List<String> file_names, String zipfile_name) thr
              file_obj.delete();
         }
         
-        return zip_file_obj;
+        return zip_file_obj.toFile();
         
     } // end createAndZipFiles
     
@@ -149,17 +130,14 @@ public void testCreateZippedNonShapefile() throws IOException{
         shp_handler.DEBUG= true;
 
         // Contains shapefile?
-        assertEquals(shp_handler.containsShapefile(), false);
+        assertFalse(shp_handler.containsShapefile());
 
         // get file_groups Map
         Map<String, List<String>> file_groups = shp_handler.getFileGroups();
         
-        assertEquals("verify that the dict is not empty", file_groups.isEmpty(), false);
-        assertEquals("verify key existance", file_groups.containsKey("not-quite-a-shape"), true);
-        assertEquals("verify value of key", file_groups.get("not-quite-a-shape"), Arrays.asList("shp", "shx", "dbf", "pdf"));
-        
-        this.showFilesInTempFolder(this.tempFolder.getRoot().getAbsolutePath());
-
+        assertFalse(file_groups.isEmpty(), "verify that the dict is not empty");
+        assertTrue(file_groups.containsKey("not-quite-a-shape"), "verify key existence");
+        assertEquals(List.of("shp", "shx", "dbf", "pdf"), file_groups.get("not-quite-a-shape"), "verify value of key");
         
         // Delete .zip
         zipfile_obj.delete();
@@ -185,8 +163,8 @@ public void testZippedTwoShapefiles() throws IOException{
         ShapefileHandler shp_handler = new ShapefileHandler(new FileInputStream(zipfile_obj));
         shp_handler.DEBUG= true;
         
-        assertEquals("verify shapefile existance", shp_handler.containsShapefile(), true);
-        assertEquals("verify that no error was found", shp_handler.errorFound, false);
+        assertTrue(shp_handler.containsShapefile(), "verify shapefile existance");
+        assertFalse(shp_handler.errorFound, "verify that no error was found");
         
         shp_handler.showFileGroups();
        // if (true){
@@ -195,21 +173,18 @@ public void testZippedTwoShapefiles() throws IOException{
         // get file_groups Map
         Map<String, List<String>> file_groups = shp_handler.getFileGroups();
         
-        assertEquals("verify that the dict is not empty", file_groups.isEmpty(), false);
+        assertFalse(file_groups.isEmpty(), "verify that the dict is not empty");
 
         // Verify the keys
-        assertEquals("verify key existance of 'shape1'", file_groups.containsKey("shape1"), true);
-        assertEquals("verify key existance of 'shape2'", file_groups.containsKey("shape2"), true);
+        assertTrue(file_groups.containsKey("shape1"), "verify key existence of 'shape1'");
+        assertTrue(file_groups.containsKey("shape2"), "verify key existence of 'shape2'");
 
         // Verify the values
-        assertEquals("verify value of key 'shape1'", file_groups.get("shape1"), Arrays.asList("shp", "shx", "dbf", "prj", "fbn", "fbx"));
-        assertEquals("verify value of key 'shape2'", file_groups.get("shape2"), Arrays.asList("shp", "shx", "dbf", "prj", "txt", "pdf", ShapefileHandler.BLANK_EXTENSION));
-        
-        this.showFilesInTempFolder(this.tempFolder.getRoot().getAbsolutePath());
-
+        assertEquals(file_groups.get("shape1"), Arrays.asList("shp", "shx", "dbf", "prj", "fbn", "fbx"), "verify value of key 'shape1'");
+        assertEquals(file_groups.get("shape2"), Arrays.asList("shp", "shx", "dbf", "prj", "txt", "pdf", ShapefileHandler.BLANK_EXTENSION), "verify value of key 'shape2'");
         
         // Rezip/Reorder the files
-        File test_unzip_folder = this.tempFolder.newFolder("test_unzip").getAbsoluteFile();
+        File test_unzip_folder = Files.createDirectory(this.tempFolder.resolve("test_unzip")).toFile();
         //File test_unzip_folder = new File("/Users/rmp553/Desktop/blah");
         shp_handler.rezipShapefileSets(new FileInputStream(zipfile_obj), test_unzip_folder );
         
@@ -218,9 +193,9 @@ public void testZippedTwoShapefiles() throws IOException{
         List<String> rezipped_filenames = new ArrayList<>();
         rezipped_filenames.addAll(Arrays.asList(test_unzip_folder.list()));
         msg("rezipped_filenames: " + rezipped_filenames);
-        List<String> expected_filenames = Arrays.asList("shape1.zip", "shape2.zip", "shape2.txt", "shape2.pdf", "shape2", "README.MD", "shp_dictionary.xls", "notes");  
-
-        assertEquals("verify that all files exist", rezipped_filenames.containsAll(rezipped_filenames), true);
+        List<String> expected_filenames = Arrays.asList("shape1.zip", "shape2.zip", "shape2.txt", "shape2.pdf", "shape2", "README.MD", "shp_dictionary.xls", "notes");
+        
+        assertTrue(rezipped_filenames.containsAll(expected_filenames), "verify that all files exist");
         
         // Delete .zip
         zipfile_obj.delete();
@@ -240,23 +215,23 @@ public void testZippedShapefileWithExtraFiles() throws IOException{
         // Pass the .zip to the ShapefileHandler
         ShapefileHandler shp_handler = new ShapefileHandler(new FileInputStream(zipfile_obj));
         shp_handler.DEBUG= true;
-
-        assertEquals("verify shapefile existance", shp_handler.containsShapefile(), true);
+        
+        assertTrue(shp_handler.containsShapefile(), "verify shapefile existence");
 
         // get file_groups Map
         Map<String, List<String>> file_groups = shp_handler.getFileGroups();
-
-        assertEquals("verify that the dict is not empty", file_groups.isEmpty(), false);
+        
+        assertFalse(file_groups.isEmpty(), "verify that the dict is not empty");
 
         // Verify the keys
-        assertEquals("verify key existance of 'shape1'", file_groups.containsKey("shape1"), true);
+        assertTrue(file_groups.containsKey("shape1"), "verify key existence of 'shape1'");
 
         // Verify the values
-        assertEquals("verify value of key 'shape1'", file_groups.get("shape1"), Arrays.asList("shp", "shx", "dbf", "prj", "pdf", "cpg", SHP_XML_EXTENSION));
-        assertEquals("verify value of key 'README'", file_groups.get("README"), Arrays.asList("md"));
-        assertEquals("verify value of key 'shape_notes'", file_groups.get("shape_notes"), Arrays.asList("txt"));
+        assertEquals(List.of("shp", "shx", "dbf", "prj", "pdf", "cpg", SHP_XML_EXTENSION), file_groups.get("shape1"), "verify value of key 'shape1'");
+        assertEquals(List.of("md"), file_groups.get("README"), "verify value of key 'README'");
+        assertEquals(List.of("txt"), file_groups.get("shape_notes"), "verify value of key 'shape_notes'");
         
-        File unzip2Folder = this.tempFolder.newFolder("test_unzip2").getAbsoluteFile();
+        File unzip2Folder = Files.createDirectory(this.tempFolder.resolve("test_unzip2")).toFile();
         // Rezip/Reorder the files
         shp_handler.rezipShapefileSets(new FileInputStream(zipfile_obj), unzip2Folder);
         //shp_handler.rezipShapefileSets(new FileInputStream(zipfile_obj), new File("/Users/rmp553/Desktop/blah"));
@@ -267,9 +242,9 @@ public void testZippedShapefileWithExtraFiles() throws IOException{
         rezipped_filenames.addAll(Arrays.asList(unzip2Folder.list()));
         
         msg("rezipped_filenames: " + rezipped_filenames);
-        List<String> expected_filenames = Arrays.asList("shape1.zip", "scratch-for-unzip-12345", "shape1.pdf", "README.md", "shape_notes.txt");  
-
-        assertEquals("verify that all files exist", expected_filenames.containsAll(rezipped_filenames), true);
+        List<String> expected_filenames = Arrays.asList("shape1.zip", "scratch-for-unzip-12345", "shape1.pdf", "README.md", "shape_notes.txt");
+        
+        assertTrue(expected_filenames.containsAll(rezipped_filenames), "verify that all files exist");
         
         // Delete .zip
         zipfile_obj.delete();
diff --git a/src/test/java/edu/harvard/iq/dataverse/util/testing/Tags.java b/src/test/java/edu/harvard/iq/dataverse/util/testing/Tags.java
new file mode 100644
index 00000000000..dcbd0529d8b
--- /dev/null
+++ b/src/test/java/edu/harvard/iq/dataverse/util/testing/Tags.java
@@ -0,0 +1,5 @@
+package edu.harvard.iq.dataverse.util.testing;
+
+public class Tags {
+    public static final String NOT_ESSENTIAL_UNITTESTS = "not-essential-unittests";
+}
diff --git a/src/test/java/edu/harvard/iq/dataverse/util/xml/XmlPrinterTest.java b/src/test/java/edu/harvard/iq/dataverse/util/xml/XmlPrinterTest.java
index 8b027b797c5..1a190389ed5 100644
--- a/src/test/java/edu/harvard/iq/dataverse/util/xml/XmlPrinterTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/util/xml/XmlPrinterTest.java
@@ -1,7 +1,7 @@
 package edu.harvard.iq.dataverse.util.xml;
 
-import org.junit.Test;
-import static org.junit.Assert.assertEquals;
+import org.junit.jupiter.api.Test;
+import static org.junit.jupiter.api.Assertions.assertEquals;
 
 public class XmlPrinterTest {
 
diff --git a/src/test/java/edu/harvard/iq/dataverse/util/xml/XmlValidatorTest.java b/src/test/java/edu/harvard/iq/dataverse/util/xml/XmlValidatorTest.java
index dbb87e9e560..f339691fde3 100644
--- a/src/test/java/edu/harvard/iq/dataverse/util/xml/XmlValidatorTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/util/xml/XmlValidatorTest.java
@@ -1,28 +1,27 @@
 package edu.harvard.iq.dataverse.util.xml;
 
-import edu.harvard.iq.dataverse.NonEssentialTests;
-
 import java.io.FileNotFoundException;
 import java.io.IOException;
 import java.net.URL;
 
 import javax.xml.parsers.ParserConfigurationException;
 
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertTrue;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertTrue;
 import static org.junit.jupiter.api.Assertions.fail;
 
-import org.junit.Ignore;
-import org.junit.Test;
-import org.junit.experimental.categories.Category;
+import edu.harvard.iq.dataverse.util.testing.Tags;
+import org.junit.jupiter.api.Disabled;
+import org.junit.jupiter.api.Tag;
+import org.junit.jupiter.api.Test;
 import org.xml.sax.SAXException;
 
 public class XmlValidatorTest {
 
     //Ignored as this relies on an external resource that has been down occasionally. 
     //May be a good test for our full vs. everytime test classifications (#4896) -MAD 4.9.1
-    @Ignore
-    @Category(NonEssentialTests.class)
+    @Disabled
+    @Tag(Tags.NOT_ESSENTIAL_UNITTESTS)
     @Test
     public void testValidateXml() throws IOException, SAXException, ParserConfigurationException {
         assertTrue(XmlValidator.validateXmlSchema("src/test/java/edu/harvard/iq/dataverse/util/xml/sendToDataCite.xml", new URL("https://schema.datacite.org/meta/kernel-3/metadata.xsd")));
diff --git a/src/test/java/edu/harvard/iq/dataverse/validation/EMailValidatorTest.java b/src/test/java/edu/harvard/iq/dataverse/validation/EMailValidatorTest.java
index 80d848248c0..0cbc9e52759 100644
--- a/src/test/java/edu/harvard/iq/dataverse/validation/EMailValidatorTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/validation/EMailValidatorTest.java
@@ -4,9 +4,9 @@
 import org.junit.jupiter.params.provider.Arguments;
 import org.junit.jupiter.params.provider.MethodSource;
 
-import javax.validation.ConstraintViolation;
-import javax.validation.Validation;
-import javax.validation.Validator;
+import jakarta.validation.ConstraintViolation;
+import jakarta.validation.Validation;
+import jakarta.validation.Validator;
 
 import java.util.Set;
 import java.util.stream.Stream;
diff --git a/src/test/java/edu/harvard/iq/dataverse/validation/PasswordValidatorUtilTest.java b/src/test/java/edu/harvard/iq/dataverse/validation/PasswordValidatorUtilTest.java
index b574ac4a082..c5a38c473fb 100644
--- a/src/test/java/edu/harvard/iq/dataverse/validation/PasswordValidatorUtilTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/validation/PasswordValidatorUtilTest.java
@@ -5,24 +5,18 @@
  */
 package edu.harvard.iq.dataverse.validation;
 
-import edu.harvard.iq.dataverse.util.BundleUtil;
 import edu.harvard.iq.dataverse.util.xml.html.HtmlPrinter;
 import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.Collection;
 import java.util.List;
-import org.junit.After;
-import org.junit.AfterClass;
-import org.junit.Before;
-import org.junit.BeforeClass;
-import org.junit.Test;
-import org.junit.experimental.runners.Enclosed;
-import org.junit.runner.RunWith;
-import org.junit.runners.Parameterized;
-import org.junit.runners.Parameterized.Parameter;
-import org.junit.runners.Parameterized.Parameters;
+import java.util.stream.Stream;
 
-import static org.junit.Assert.*;
+import org.junit.jupiter.api.Test;
+
+import static org.junit.jupiter.api.Assertions.*;
+
+import org.junit.jupiter.params.ParameterizedTest;
+import org.junit.jupiter.params.provider.Arguments;
+import org.junit.jupiter.params.provider.MethodSource;
 import org.passay.CharacterRule;
 import org.passay.EnglishCharacterData;
 
@@ -30,10 +24,9 @@
  *
  * @author pdurbin
  */
-@RunWith(Enclosed.class)
 public class PasswordValidatorUtilTest {
 
-    public static class PasswordValidatorUtilNoParamTest {
+    static class PasswordValidatorUtilNoParamTest {
         /**
          * Test of getPasswordRequirements method, of class PasswordValidatorUtil.
          */
@@ -83,79 +76,38 @@ public void testParseConfigString() {
 
     }
 
-    @RunWith(Parameterized.class)
-    public static class PasswordValidatorUtilParamTest {
-
-        // influences use of # or "each" in text generation
-        @Parameter(0)
-        public int numberOfCharacteristics;
-
-        @Parameter(1)
-        public String characterRulesConfigString;
-
-        @Parameter(2)
-        public String expectedValue;
-
-        @Parameters
-        public static Collection data() {
-            return Arrays.asList(new Object[][] {
-                {
-                    2,
-                    null,
-                    "At least 1 character from each of the following types: letter, numeral"
-                },
-                {
-                    2,
-                    "UpperCase:1,LowerCase:1,Digit:1,Special:1",
-                    "At least 1 character from 2 of the following types: uppercase, lowercase, numeral, special"
-                },
-                {
-                    4,
-                    "UpperCase:1,LowerCase:1,Digit:1,Special:1",
-                    "At least 1 character from each of the following types: uppercase, lowercase, numeral, special"
-                },
-
-                // Should say each, even if more characteristics set than possible
-                {
-                    2,
-                    "Digit:1",
-                    "At least 1 character from each of the following types: numeral"
-                },
-
-                {
-                    2,
-                    "Digit:2",
-                    "Fufill 2: At least 2 numeral characters"
-                },
-                {
-                    2,
-                    "LowerCase:1,Digit:2,Special:3",
-                    "Fufill 2: At least 1 lowercase characters, 2 numeral characters, 3 special characters"
-                },
-
-                // letter is mentioned even though that configuration is discouraged
-                {
-                    2,
-                    "UpperCase:1,LowerCase:1,Digit:1,Special:1,Alphabetical:1",
-                    "At least 1 character from 2 of the following types: uppercase, lowercase, letter, numeral, special"
-                }
-            });
-        }
-
-        @Test
-        public void testGetRequiredCharacters() {
-            List<CharacterRule> characterRules;
-            String message = "Character rules string for ";
-            if (characterRulesConfigString != null) {
-                characterRules = PasswordValidatorUtil.getCharacterRules(characterRulesConfigString);
-                message += characterRulesConfigString;
-            } else {
-                characterRules = PasswordValidatorUtil.getCharacterRulesDefault();
-                message += "default";
-            }
-
-            String reqString = PasswordValidatorUtil.getRequiredCharacters(characterRules, numberOfCharacteristics);
-            assertEquals(message + ": " + reqString, expectedValue, reqString);
+    static Stream<Arguments> configurations() {
+        return Stream.of(
+            Arguments.of(2, null,
+                "At least 1 character from each of the following types: letter, numeral"),
+            Arguments.of(2, "UpperCase:1,LowerCase:1,Digit:1,Special:1",
+                "At least 1 character from 2 of the following types: uppercase, lowercase, numeral, special"),
+            Arguments.of(4, "UpperCase:1,LowerCase:1,Digit:1,Special:1",
+                "At least 1 character from each of the following types: uppercase, lowercase, numeral, special"),
+            // Should say each, even if more characteristics set than possible
+            Arguments.of(2, "Digit:1", "At least 1 character from each of the following types: numeral"),
+            Arguments.of(2, "Digit:2", "Fufill 2: At least 2 numeral characters"),
+            Arguments.of(2, "LowerCase:1,Digit:2,Special:3",
+                "Fufill 2: At least 1 lowercase characters, 2 numeral characters, 3 special characters"),
+            // letter is mentioned even though that configuration is discouraged
+            Arguments.of(2, "UpperCase:1,LowerCase:1,Digit:1,Special:1,Alphabetical:1",
+                "At least 1 character from 2 of the following types: uppercase, lowercase, letter, numeral, special")
+        );
+    }
+    @ParameterizedTest
+    @MethodSource("configurations")
+    void testGetRequiredCharacters(int numberOfCharacteristics, String characterRulesConfigString, String expectedValue) {
+        List<CharacterRule> characterRules;
+        String message = "Character rules string for ";
+        if (characterRulesConfigString != null) {
+            characterRules = PasswordValidatorUtil.getCharacterRules(characterRulesConfigString);
+            message += characterRulesConfigString;
+        } else {
+            characterRules = PasswordValidatorUtil.getCharacterRulesDefault();
+            message += "default";
         }
+        
+        String reqString = PasswordValidatorUtil.getRequiredCharacters(characterRules, numberOfCharacteristics);
+        assertEquals(expectedValue, reqString, message + ": " + reqString);
     }
 }
diff --git a/src/test/java/edu/harvard/iq/dataverse/validation/URLValidatorTest.java b/src/test/java/edu/harvard/iq/dataverse/validation/URLValidatorTest.java
index 3fe8501bbbf..8c29b609c9b 100644
--- a/src/test/java/edu/harvard/iq/dataverse/validation/URLValidatorTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/validation/URLValidatorTest.java
@@ -1,8 +1,8 @@
 package edu.harvard.iq.dataverse.validation;
 
-import javax.validation.ConstraintViolation;
-import javax.validation.Validation;
-import javax.validation.Validator;
+import jakarta.validation.ConstraintViolation;
+import jakarta.validation.Validation;
+import jakarta.validation.Validator;
 import org.junit.jupiter.params.ParameterizedTest;
 import org.junit.jupiter.params.provider.Arguments;
 import org.junit.jupiter.params.provider.MethodSource;
diff --git a/src/test/java/edu/harvard/iq/dataverse/validation/UserNameValidatorTest.java b/src/test/java/edu/harvard/iq/dataverse/validation/UserNameValidatorTest.java
index a9816f81dca..1e5f0ca0371 100644
--- a/src/test/java/edu/harvard/iq/dataverse/validation/UserNameValidatorTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/validation/UserNameValidatorTest.java
@@ -7,9 +7,9 @@
 import org.junit.jupiter.params.provider.Arguments;
 import org.junit.jupiter.params.provider.MethodSource;
 
-import javax.validation.ConstraintViolation;
-import javax.validation.Validation;
-import javax.validation.Validator;
+import jakarta.validation.ConstraintViolation;
+import jakarta.validation.Validation;
+import jakarta.validation.Validator;
 
 import static org.junit.jupiter.api.Assertions.assertEquals;
 
diff --git a/tests/run_docker_dataverse.sh b/tests/run_docker_dataverse.sh
deleted file mode 100644
index 56d55f8b057..00000000000
--- a/tests/run_docker_dataverse.sh
+++ /dev/null
@@ -1,4 +0,0 @@
-#!/bin/bash
-
-cd ..
-. ./conf/docker-aio/prep_it.bash
diff --git a/tests/shell/spec/update_fields_spec.sh b/tests/shell/spec/update_fields_spec.sh
index e77121672dd..48054a121b7 100644
--- a/tests/shell/spec/update_fields_spec.sh
+++ b/tests/shell/spec/update_fields_spec.sh
@@ -1,16 +1,16 @@
 #shellcheck shell=sh
 
 update_fields() {
-  ../../conf/solr/8.11.1/update-fields.sh "$@"
+  ../../conf/solr/9.3.0/update-fields.sh "$@"
 }
 
 Describe "Update fields command"
 
   Describe "can operate on upstream data"
-    copyUpstreamSchema() { cp ../../conf/solr/8.11.1/schema.xml data/solr/upstream-schema.xml; }
+    copyUpstreamSchema() { cp ../../conf/solr/9.3.0/schema.xml data/solr/upstream-schema.xml; }
     AfterAll 'copyUpstreamSchema'
 
-    Path schema-xml="../../conf/solr/8.11.1/schema.xml"
+    Path schema-xml="../../conf/solr/9.3.0/schema.xml"
     It "needs upstream schema.xml"
       The path schema-xml should be exist
     End