diff --git a/.github/ISSUE_TEMPLATE/bug_report.md b/.github/ISSUE_TEMPLATE/bug_report.md
index 7e6995d76d9..3dba7d52109 100644
--- a/.github/ISSUE_TEMPLATE/bug_report.md
+++ b/.github/ISSUE_TEMPLATE/bug_report.md
@@ -14,7 +14,7 @@ Thank you for contributing to the Dataverse Project through the creation of a bu
 WARNING: If this is a security issue it should be reported privately to security@dataverse.org
 
 More information on bug issues and contributions can be found in the "Contributing to Dataverse" page:
-https://github.com/IQSS/dataverse/blob/develop/CONTRIBUTING.md#bug-reportsissues
+https://guides.dataverse.org/en/latest/contributor/index.html
 
 Please fill out as much of the template as you can.
 Start below this comment section.
@@ -44,7 +44,6 @@ Start below this comment section.
 **Any related open or closed issues to this bug report?**
 
 
-
 **Screenshots:**
 
 No matter the issue, screenshots are always welcome.
@@ -53,3 +52,7 @@ To add a screenshot, please use one of the following formats and/or methods desc
 
 * https://help.github.com/en/articles/file-attachments-on-issues-and-pull-requests
 *
+
+
+**Are you thinking about creating a pull request for this issue?**
+Help is always welcome. Is this bug something you or your organization plan to fix?
diff --git a/.github/ISSUE_TEMPLATE/feature_request.md b/.github/ISSUE_TEMPLATE/feature_request.md
index d6248537418..7365cb4317c 100644
--- a/.github/ISSUE_TEMPLATE/feature_request.md
+++ b/.github/ISSUE_TEMPLATE/feature_request.md
@@ -1,7 +1,7 @@
 ---
 name: Feature request
 about: Suggest an idea or new feature for the Dataverse software!
-title: 'Feature Request/Idea:'
+title: 'Feature Request:'
 labels: 'Type: Feature'
 assignees: ''
 
@@ -11,7 +11,7 @@ assignees: ''
 Thank you for contributing to the Dataverse Project through the creation of a feature request!
 
 More information on ideas/feature requests and contributions can be found in the "Contributing to Dataverse" page:
-https://github.com/IQSS/dataverse/blob/develop/CONTRIBUTING.md#ideasfeature-requests
+https://guides.dataverse.org/en/latest/contributor/index.html
 
 Please fill out as much of the template as you can.
 Start below this comment section.
@@ -34,3 +34,6 @@ Start below this comment section.
 
 
 **Any open or closed issues related to this feature request?**
+
+**Are you thinking about creating a pull request for this feature?**  
+Help is always welcome. Is this feature something you or your organization plan to implement?
diff --git a/.github/ISSUE_TEMPLATE/idea_proposal.md b/.github/ISSUE_TEMPLATE/idea_proposal.md
new file mode 100644
index 00000000000..8cb6c7bfafe
--- /dev/null
+++ b/.github/ISSUE_TEMPLATE/idea_proposal.md
@@ -0,0 +1,40 @@
+---
+name: Idea proposal
+about: Propose a new idea for discussion to improve the Dataverse software!
+title: 'Suggestion:'
+labels: 'Type: Suggestion'
+assignees: ''
+
+---
+
+<!--
+Thank you for contributing to the Dataverse Project through the creation of an idea proposal!
+
+More information on ideas/feature requests and contributions can be found in the "Contributing to Dataverse" page:
+https://guides.dataverse.org/en/latest/contributor/index.html
+
+Please fill out as much of the template as you can.
+Start below this comment section.
+-->
+
+**Overview of the Suggestion**
+
+
+**What kind of user is the suggestion intended for?**
+(Example user roles: API User, Curator, Depositor, Guest, Superuser, Sysadmin)
+
+
+**What inspired this idea?**
+
+
+**What existing behavior do you want changed?**
+
+
+**Is there any brand new behavior you want to add to Dataverse?**
+
+
+**Any open or closed issues related to this suggestion?**
+
+
+**Are you thinking about creating a pull request for this issue?**
+Help is always welcome. Is this idea something you or your organization plan to implement?
diff --git a/.github/workflows/check_property_files.yml b/.github/workflows/check_property_files.yml
new file mode 100644
index 00000000000..505310aab35
--- /dev/null
+++ b/.github/workflows/check_property_files.yml
@@ -0,0 +1,32 @@
+name: "Properties Check"
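+# Runs on pull requests that touch .properties files or metadata block definitions:
+# one job checks for duplicate property keys, the other verifies metadata block properties.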
+on:
+    pull_request:
+        paths:
+            - "src/**/*.properties"
+            - "scripts/api/data/metadatablocks/*"
+jobs:
+    duplicate_keys:
+        name: Duplicate Keys
+        runs-on: ubuntu-latest
+        steps:
+            - uses: actions/checkout@v4
+            - name: Run duplicates detection script
+              shell: bash
+              run: tests/check_duplicate_properties.sh
+
+    metadata_blocks_properties:
+        name: Metadata Blocks Properties
+        runs-on: ubuntu-latest
+        steps:
+            - uses: actions/checkout@v4
+            - name: Setup GraalVM + Native Image
+              uses: graalvm/setup-graalvm@v1
+              with:
+                github-token: ${{ secrets.GITHUB_TOKEN }}
+                java-version: '21'
+                distribution: 'graalvm-community'
+            - name: Setup JBang
+              uses: jbangdev/setup-jbang@main
+            - name: Run metadata block properties verification script
+              shell: bash
+              run: tests/verify_mdb_properties.sh
diff --git a/.github/workflows/shellspec.yml b/.github/workflows/shellspec.yml
index 227a74fa00f..3320d9d08a4 100644
--- a/.github/workflows/shellspec.yml
+++ b/.github/workflows/shellspec.yml
@@ -24,28 +24,11 @@ jobs:
               run: |
                   cd tests/shell
                   shellspec
-    shellspec-centos7:
-        name: "CentOS 7"
+    shellspec-rocky9:
+        name: "RockyLinux 9"
         runs-on: ubuntu-latest
         container:
-            image: centos:7
-        steps:
-            - uses: actions/checkout@v2
-            - name: Install shellspec
-              run: |
-                  curl -fsSL https://github.com/shellspec/shellspec/releases/download/${{ env.SHELLSPEC_VERSION }}/shellspec-dist.tar.gz | tar -xz -C /usr/share
-                  ln -s /usr/share/shellspec/shellspec /usr/bin/shellspec
-            - name: Install dependencies
-              run: yum install -y ed
-            - name: Run shellspec
-              run: |
-                  cd tests/shell
-                  shellspec
-    shellspec-rocky8:
-        name: "RockyLinux 8"
-        runs-on: ubuntu-latest
-        container:
-            image: rockylinux/rockylinux:8
+            image: rockylinux/rockylinux:9
         steps:
             - uses: actions/checkout@v2
             - name: Install shellspec
diff --git a/doc/release-notes/10169-JSON-schema-validation.md b/doc/release-notes/10169-JSON-schema-validation.md
new file mode 100644
index 00000000000..92ff4a917d5
--- /dev/null
+++ b/doc/release-notes/10169-JSON-schema-validation.md
@@ -0,0 +1,3 @@
+### Improved JSON Schema validation for datasets
+
+JSON Schema validation has been enhanced with checks for required and allowed child objects and type checking for field types including `primitive`, `compound`, and `controlledVocabulary`. Error messages are now more user-friendly, to help pinpoint issues in the dataset JSON. See [Retrieve a Dataset JSON Schema for a Collection](https://guides.dataverse.org/en/6.3/api/native-api.html#retrieve-a-dataset-json-schema-for-a-collection) in the API Guide and PR #10543.
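+
+As a quick sketch (based on the endpoints documented in the linked guide section; adjust the collection alias and file name to your installation):
+
+```shell
+# Retrieve the JSON schema customized for the "root" collection
+curl -H "X-Dataverse-key:$API_TOKEN" "$SERVER_URL/api/dataverses/root/datasetSchema"
+
+# Validate a dataset JSON file against that collection's schema
+curl -H "X-Dataverse-key:$API_TOKEN" -X POST "$SERVER_URL/api/dataverses/root/validateDatasetJson" -H 'Content-type:application/json' --upload-file dataset.json
+```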
diff --git a/doc/release-notes/10341-croissant.md b/doc/release-notes/10341-croissant.md
new file mode 100644
index 00000000000..15bc7029099
--- /dev/null
+++ b/doc/release-notes/10341-croissant.md
@@ -0,0 +1,9 @@
+A new metadata export format called Croissant is now available as an external metadata exporter. It is oriented toward making datasets consumable by machine learning.
+
+When enabled, Croissant replaces the Schema.org JSON-LD format in the `<head>` of dataset landing pages. For details, see [Schema.org JSON-LD/Croissant Metadata](https://dataverse-guide--10533.org.readthedocs.build/en/10533/admin/discoverability.html#schema-org-head) under the Discoverability section of the Admin Guide.
+
+For more about the Croissant exporter, see https://github.com/gdcc/exporter-croissant
+
+For installation instructions, see [Enabling External Exporters](https://dataverse-guide--10533.org.readthedocs.build/en/10533/installation/advanced.html#enabling-external-exporters) in the Installation Guide.
+
+See also Issue #10341 and PR #10533.
diff --git a/doc/release-notes/10508-base-image-fixes.md b/doc/release-notes/10508-base-image-fixes.md
new file mode 100644
index 00000000000..148066435e8
--- /dev/null
+++ b/doc/release-notes/10508-base-image-fixes.md
@@ -0,0 +1,12 @@
+# Security and Compatibility Fixes to the Container Base Image
+
+- Switch "wait-for" to "wait4x", aligned with the Configbaker Image
+- Update "jattach" to v2.2
+- Install AMD64 / ARM64 versions of tools as necessary
+- Run base image as unprivileged user by default instead of `root` - this was an oversight from OpenShift changes
+- Linux User, Payara Admin and Domain Master passwords:
+  - Print hints about the default, publicly known passwords that are in place
+  - Enable replacing these passwords at container boot time
+- Enable building with an updated Temurin JRE image based on Ubuntu 24.04 LTS
+- Fix entrypoint script troubles with pre- and postboot script files
+- Unify location of files at CONFIG_DIR=/opt/payara/config, avoid writing to other places
\ No newline at end of file
diff --git a/doc/release-notes/10583-dataset-unlink-functionality-same-permission-as-link.md b/doc/release-notes/10583-dataset-unlink-functionality-same-permission-as-link.md
new file mode 100644
index 00000000000..f97bd252db3
--- /dev/null
+++ b/doc/release-notes/10583-dataset-unlink-functionality-same-permission-as-link.md
@@ -0,0 +1,2 @@
+New "Unlink Dataset" button has been added to the Dataset Page to allow a user to unlink a dataset from a collection that was previously linked with the "Link Dataset" button. The user must possess the same permissions needed to unlink the Dataset as they would to link the Dataset.
+The [existing API](https://guides.dataverse.org/en/6.3/admin/dataverses-datasets.html#unlink-a-dataset) for unlinking datasets has been updated to no longer require superuser access. The "Publish Dataset" permission is now enough.
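+
+For example (as documented in the linked guide section), a user with the "Publish Dataset" permission can now run:
+
+```shell
+curl -H "X-Dataverse-key: $API_TOKEN" -X DELETE http://$SERVER/api/datasets/$linked-dataset-id/deleteLink/$linking-dataverse-alias
+```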
diff --git a/doc/release-notes/10633-add-dataverse-api-extension.md b/doc/release-notes/10633-add-dataverse-api-extension.md
new file mode 100644
index 00000000000..f5d8030e8ac
--- /dev/null
+++ b/doc/release-notes/10633-add-dataverse-api-extension.md
@@ -0,0 +1 @@
+The addDataverse (/api/dataverses/{identifier}) API endpoint has been extended to allow adding metadata blocks, input levels, and facet IDs at creation time, matching what the JSF Dataverse page offers in create mode.
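+
+For example, assuming a JSON file that includes the optional `metadataBlocks` object (see the API Guide for its exact structure), the collection is created the same way as before:
+
+```shell
+curl -H "X-Dataverse-key:$API_TOKEN" -X POST "$SERVER_URL/api/dataverses/$PARENT" --upload-file dataverse-complete-optional-params.json
+```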
diff --git a/doc/release-notes/10726-dataverse-facets-api-extension.md b/doc/release-notes/10726-dataverse-facets-api-extension.md
new file mode 100644
index 00000000000..baf6f798e35
--- /dev/null
+++ b/doc/release-notes/10726-dataverse-facets-api-extension.md
@@ -0,0 +1,3 @@
+A new optional query parameter, "returnDetails", has been added to the "dataverses/{identifier}/facets/" endpoint to include detailed information for each DataverseFacet.
+
+A new endpoint, "datasetfields/facetables", lists all facetable dataset fields defined in the installation.
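+
+For example:
+
+```shell
+# Detailed facet information for a collection
+curl -H "X-Dataverse-key:$API_TOKEN" "$SERVER_URL/api/dataverses/$ID/facets?returnDetails=true"
+
+# All facetable dataset fields defined in the installation
+curl "$SERVER_URL/api/datasetfields/facetables"
+```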
diff --git a/doc/release-notes/10733-add-publication-status-to-search-api-results.md b/doc/release-notes/10733-add-publication-status-to-search-api-results.md
new file mode 100644
index 00000000000..d015a50a00d
--- /dev/null
+++ b/doc/release-notes/10733-add-publication-status-to-search-api-results.md
@@ -0,0 +1,14 @@
+The Search API (/api/search) response will now include publicationStatuses in the JSON response as long as the list is not empty.
+
+Example:
+```javascript
+"items": [
+    {
+        "name": "Darwin's Finches",
+        ...
+        "publicationStatuses": [
+            "Unpublished",
+            "Draft"
+        ],
+(etc, etc)
+```
diff --git a/doc/release-notes/10741-list-metadatablocks-display-on-create-fix.md b/doc/release-notes/10741-list-metadatablocks-display-on-create-fix.md
new file mode 100644
index 00000000000..4edadcaa1fc
--- /dev/null
+++ b/doc/release-notes/10741-list-metadatablocks-display-on-create-fix.md
@@ -0,0 +1 @@
+The dataverses/{identifier}/metadatablocks endpoint has been fixed so that fields marked displayOnCreate=true are no longer returned when an input level has include=false and the query parameters returnDatasetFieldTypes=true and onlyDisplayedOnCreate=true are set.
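+
+For example, a request like the following no longer returns such fields:
+
+```shell
+curl -H "X-Dataverse-key:$API_TOKEN" "$SERVER_URL/api/dataverses/$ID/metadatablocks?returnDatasetFieldTypes=true&onlyDisplayedOnCreate=true"
+```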
diff --git a/doc/release-notes/10744-ro-crate-docs.md b/doc/release-notes/10744-ro-crate-docs.md
new file mode 100644
index 00000000000..9d52b4578b4
--- /dev/null
+++ b/doc/release-notes/10744-ro-crate-docs.md
@@ -0,0 +1,3 @@
+## RO-Crate Support (Metadata Export)
+
+Dataverse now supports [RO-Crate](https://www.researchobject.org/ro-crate/) in the sense that dataset metadata can be exported in that format. This functionality is not available out of the box but you can enable one or more RO-Crate exporters from the [list of external exporters](https://preview.guides.gdcc.io/en/develop/installation/advanced.html#inventory-of-external-exporters). See also #10744.
diff --git a/doc/release-notes/10749-dataverse-user-permissions-api-extension.md b/doc/release-notes/10749-dataverse-user-permissions-api-extension.md
new file mode 100644
index 00000000000..706b1f42641
--- /dev/null
+++ b/doc/release-notes/10749-dataverse-user-permissions-api-extension.md
@@ -0,0 +1 @@
+A new API endpoint, "dataverses/{identifier}/userPermissions", returns the calling user's permissions on a dataverse.
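+
+For example:
+
+```shell
+curl -H "X-Dataverse-key:$API_TOKEN" "$SERVER_URL/api/dataverses/$ID/userPermissions"
+```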
diff --git a/doc/release-notes/10758-rust-client.md b/doc/release-notes/10758-rust-client.md
new file mode 100644
index 00000000000..e206f27ce65
--- /dev/null
+++ b/doc/release-notes/10758-rust-client.md
@@ -0,0 +1,3 @@
+### Rust API client library
+
+An API client library for the Rust programming language is now available at https://github.com/gdcc/rust-dataverse and has been added to the [list of client libraries](https://dataverse-guide--10758.org.readthedocs.build/en/10758/api/client-libraries.html) in the API Guide. See also #10758.
diff --git a/doc/release-notes/10797-update-current-version-bug-fix.md b/doc/release-notes/10797-update-current-version-bug-fix.md
new file mode 100644
index 00000000000..2cfaf69cad3
--- /dev/null
+++ b/doc/release-notes/10797-update-current-version-bug-fix.md
@@ -0,0 +1,11 @@
+A significant bug in the superuser-only "Update-Current-Version" publication option was found and fixed in this release. If the Update-Current-Version option was used when changes had been made to the dataset Terms (rather than to dataset metadata), or if the PID provider service was down or returned an error, the update would fail, rendering the dataset unusable and requiring restoration from a backup. The fix in this release allows the update to succeed in both of these cases and redesigns the functionality so that unknown issues should not make the dataset unusable (i.e., the error is reported and the dataset remains in its current state, with the last-published version intact and the changes still in the draft version).
+
+Users of earlier Dataverse releases are encouraged to alert their superusers to this issue. Those who wish to disable this functionality have two options:
+* Change the dataset.updateRelease entry in the Bundle.properties file (or local language version) to "Do Not Use" or similar (doesn't disable but alerts superusers to the issue), or
+* Edit the dataset.xhtml file to remove the lines
+
+    <c:if test="#{dataverseSession.user.isSuperuser()}">
+      <f:selectItem rendered="#" itemLabel="#{bundle['dataset.updateRelease']}" itemValue="3" />
+    </c:if>
+
+Then delete the contents of the `generated` and `osgi-cache` directories in the Dataverse Payara domain and restart the Payara server.
diff --git a/doc/release-notes/7068-reserve-file-pids.md b/doc/release-notes/7068-reserve-file-pids.md
new file mode 100644
index 00000000000..182a0d7f67b
--- /dev/null
+++ b/doc/release-notes/7068-reserve-file-pids.md
@@ -0,0 +1,9 @@
+## Release Highlights
+
+### Pre-Publish File DOI Reservation with DataCite
+
+Dataverse installations using DataCite (or other persistent identifier (PID) providers that support reserving PIDs) will be able to reserve PIDs for files when they are uploaded (rather than at publication time). Note that reserving file DOIs can slow uploads with large numbers of files, so administrators may need to adjust timeouts (specifically any Apache "``ProxyPass / ajp://localhost:8009/ timeout=``" setting in the recommended Dataverse configuration).
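+
+For example, the timeout can be raised in the Apache configuration (the value below is only an illustration; tune it for your installation):
+
+```
+ProxyPass / ajp://localhost:8009/ timeout=600
+```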
+
+## Major Use Cases
+
+- Users will have DOIs/PIDs reserved for their files as part of file upload instead of at publication time. (Issue #7068, PR #7334)
diff --git a/doc/release-notes/9081-CC0-waiver-turned-into-custom-license.md b/doc/release-notes/9081-CC0-waiver-turned-into-custom-license.md
new file mode 100644
index 00000000000..042b2ec39fd
--- /dev/null
+++ b/doc/release-notes/9081-CC0-waiver-turned-into-custom-license.md
@@ -0,0 +1,6 @@
+In an earlier Dataverse release, datasets with only 'CC0 Waiver' in the termsofuse field were converted to 'Custom License' instead of the CC0 1.0 license during an automated process. A new process was added to correct this. Only datasets with no terms other than the one created by the previous process will be modified.
+- The existing 'Terms of Use' must be equal to 'This dataset is made available under a Creative Commons CC0 license with the following additional/modified terms and conditions: CC0 Waiver'
+- The following terms fields must be empty: Confidentiality Declaration, Special Permissions, Restrictions, Citation Requirements, Depositor Requirements, Conditions, and Disclaimer.
+- The License ID must not be assigned.
+
+This process will set the License ID to that of the CC0 1.0 license and remove the contents of the termsofuse field.
diff --git a/doc/release-notes/9317-delete-saved-search.md b/doc/release-notes/9317-delete-saved-search.md
new file mode 100644
index 00000000000..34723801036
--- /dev/null
+++ b/doc/release-notes/9317-delete-saved-search.md
@@ -0,0 +1,4 @@
+### Saved search deletion
+
+Saved searches can now be removed via the API endpoint `/api/admin/savedsearches/$id`. See PR #10198.
+This is reflected in the [Saved Search Native API section](https://dataverse-guide--10198.org.readthedocs.build/en/10198/api/native-api.html#saved-search) of the Guide.
\ No newline at end of file
diff --git a/doc/release-notes/api-blocking-filter-json.md b/doc/release-notes/api-blocking-filter-json.md
new file mode 100644
index 00000000000..337ff82dd8b
--- /dev/null
+++ b/doc/release-notes/api-blocking-filter-json.md
@@ -0,0 +1,3 @@
+* When any `ApiBlockingFilter` policy applies to a request, the body of the error response is now valid JSON.
+  If an API client did any special processing to parse the previous body, that is no longer necessary.
+  The status code of such responses has not changed.
diff --git a/doc/release-notes/make-data-count-.md b/doc/release-notes/make-data-count-.md
new file mode 100644
index 00000000000..9022582dddb
--- /dev/null
+++ b/doc/release-notes/make-data-count-.md
@@ -0,0 +1,3 @@
+### Counter Processor 1.05 Support
+
+This release includes support for counter-processor-1.05 for processing Make Data Count metrics. If you are running Make Data Count support, you should reinstall/reconfigure counter-processor as described in the latest Guides. (For existing installations, note that counter-processor-1.05 requires Python 3, so you will need to follow the full counter-processor install. Also note that if you configure the new version the same way, it will reprocess the days in the current month when it is first run. This is normal and will not affect the metrics in Dataverse.)
diff --git a/doc/sphinx-guides/source/_static/api/dataverse-complete-optional-params.json b/doc/sphinx-guides/source/_static/api/dataverse-complete-optional-params.json
new file mode 100644
index 00000000000..fef32aa1e2c
--- /dev/null
+++ b/doc/sphinx-guides/source/_static/api/dataverse-complete-optional-params.json
@@ -0,0 +1,65 @@
+{
+  "name": "Scientific Research",
+  "alias": "science",
+  "dataverseContacts": [
+    {
+      "contactEmail": "pi@example.edu"
+    },
+    {
+      "contactEmail": "student@example.edu"
+    }
+  ],
+  "affiliation": "Scientific Research University",
+  "description": "We do all the science.",
+  "dataverseType": "LABORATORY",
+  "metadataBlocks": {
+    "metadataBlockNames": [
+      "citation", "geospatial"
+    ],
+    "inputLevels": [
+      {
+        "datasetFieldTypeName": "geographicCoverage",
+        "include": true,
+        "required": true
+      },
+      {
+        "datasetFieldTypeName": "country",
+        "include": true,
+        "required": true
+      },
+      {
+        "datasetFieldTypeName": "geographicUnit",
+        "include": false,
+        "required": false
+      },
+      {
+        "datasetFieldTypeName": "geographicBoundingBox",
+        "include": false,
+        "required": false
+      },
+      {
+        "datasetFieldTypeName": "westLongitude",
+        "include": false,
+        "required": false
+      },
+      {
+        "datasetFieldTypeName": "eastLongitude",
+        "include": false,
+        "required": false
+      },
+      {
+        "datasetFieldTypeName": "northLatitude",
+        "include": false,
+        "required": false
+      },
+      {
+        "datasetFieldTypeName": "southLatitude",
+        "include": false,
+        "required": false
+      }
+    ],
+    "facetIds": [
+      "authorName", "authorAffiliation"
+    ]
+  }
+}
diff --git a/doc/sphinx-guides/source/_static/util/counter_daily.sh b/doc/sphinx-guides/source/_static/util/counter_daily.sh
index 674972b18f2..5095a83b7e2 100644
--- a/doc/sphinx-guides/source/_static/util/counter_daily.sh
+++ b/doc/sphinx-guides/source/_static/util/counter_daily.sh
@@ -1,6 +1,6 @@
 #! /bin/bash
 
-COUNTER_PROCESSOR_DIRECTORY="/usr/local/counter-processor-0.1.04"
+COUNTER_PROCESSOR_DIRECTORY="/usr/local/counter-processor-1.05"
 MDC_LOG_DIRECTORY="/usr/local/payara6/glassfish/domains/domain1/logs/mdc"
 
 # counter_daily.sh
diff --git a/doc/sphinx-guides/source/_templates/navbar.html b/doc/sphinx-guides/source/_templates/navbar.html
index c7b81dcb937..d88306be8ae 100644
--- a/doc/sphinx-guides/source/_templates/navbar.html
+++ b/doc/sphinx-guides/source/_templates/navbar.html
@@ -25,7 +25,6 @@
                     <a href="#" class="dropdown-toggle" data-toggle="dropdown" role="button" aria-haspopup="true" aria-expanded="false">About <span class="caret"></span></a>
                     <ul class="dropdown-menu">
                         <li><a target="_blank" href="https://dataverse.org/about">About the Project</a></li>
-                        <li><a target="_blank" href="https://dataverse.org/add-data">Add Data</a></li>
                         <li><a target="_blank" href="https://dataverse.org/blog">Blog</a></li>
                         <li><a target="_blank" href="https://dataverse.org/presentations">Presentations</a></li>
                         <li><a target="_blank" href="https://dataverse.org/publications">Publications</a></li>
diff --git a/doc/sphinx-guides/source/admin/dataverses-datasets.rst b/doc/sphinx-guides/source/admin/dataverses-datasets.rst
index 37494c57fa1..c6f15968f00 100644
--- a/doc/sphinx-guides/source/admin/dataverses-datasets.rst
+++ b/doc/sphinx-guides/source/admin/dataverses-datasets.rst
@@ -147,7 +147,7 @@ It returns a list in the following format:
 Unlink a Dataset
 ^^^^^^^^^^^^^^^^
 
-Removes a link between a dataset and a Dataverse collection. Only accessible to superusers. ::
+Removes a link between a dataset and a Dataverse collection. Accessible to users with Publish Dataset permissions. ::
 
     curl -H "X-Dataverse-key: $API_TOKEN" -X DELETE http://$SERVER/api/datasets/$linked-dataset-id/deleteLink/$linking-dataverse-alias
 
diff --git a/doc/sphinx-guides/source/admin/discoverability.rst b/doc/sphinx-guides/source/admin/discoverability.rst
index 767bb55bce6..19ef7250a29 100644
--- a/doc/sphinx-guides/source/admin/discoverability.rst
+++ b/doc/sphinx-guides/source/admin/discoverability.rst
@@ -30,14 +30,21 @@ The HTML source of a dataset landing page includes "DC" (Dublin Core) ``<meta>``
         <meta name="DC.type" content="Dataset"
         <meta name="DC.title" content="..."
 
-Schema.org JSON-LD Metadata
-+++++++++++++++++++++++++++
+.. _schema.org-head:
 
-The HTML source of a dataset landing page includes Schema.org JSON-LD metadata like this::
+Schema.org JSON-LD/Croissant Metadata
++++++++++++++++++++++++++++++++++++++
+
+The ``<head>`` of the HTML source of a dataset landing page includes Schema.org JSON-LD metadata like this::
 
 
         <script type="application/ld+json">{"@context":"http://schema.org","@type":"Dataset","@id":"https://doi.org/...
 
+If you enable the Croissant metadata export format (see :ref:`external-exporters`) the ``<head>`` will show Croissant metadata instead. It looks similar, but you should see ``"cr": "http://mlcommons.org/croissant/"`` in the output.
+
+For backward compatibility, if you enable Croissant, the older Schema.org JSON-LD format (``schema.org`` in the API) will still be available from both the web interface (see :ref:`metadata-export-formats`) and the API (see :ref:`export-dataset-metadata-api`).
+
+The Dataverse team has been working with Google on both formats. Google has `indicated <https://github.com/mlcommons/croissant/issues/530#issuecomment-1964227662>`_ that for `Google Dataset Search <https://datasetsearch.research.google.com>`_ (the main reason we started adding this extra metadata in the ``<head>`` of dataset pages), Croissant is the successor to the older format.
 
 .. _discovery-sign-posting:
 
diff --git a/doc/sphinx-guides/source/admin/make-data-count.rst b/doc/sphinx-guides/source/admin/make-data-count.rst
index fe32af6649a..51bc2c4a9fe 100644
--- a/doc/sphinx-guides/source/admin/make-data-count.rst
+++ b/doc/sphinx-guides/source/admin/make-data-count.rst
@@ -16,7 +16,7 @@ Architecture
 
 Dataverse installations who would like support for Make Data Count must install `Counter Processor`_, a Python project created by California Digital Library (CDL) which is part of the Make Data Count project and which runs the software in production as part of their `DASH`_ data sharing platform.
 
-.. _Counter Processor: https://github.com/CDLUC3/counter-processor
+.. _Counter Processor: https://github.com/gdcc/counter-processor
 .. _DASH: https://cdluc3.github.io/dash/
 
 The diagram below shows how Counter Processor interacts with your Dataverse installation and the DataCite hub, once configured. Dataverse installations using Handles rather than DOIs should note the limitations in the next section of this page.
@@ -84,9 +84,9 @@ Configure Counter Processor
 
 * Change to the directory where you installed Counter Processor.
 
-  * ``cd /usr/local/counter-processor-0.1.04``
+  * ``cd /usr/local/counter-processor-1.05``
 
-* Download :download:`counter-processor-config.yaml <../_static/admin/counter-processor-config.yaml>` to ``/usr/local/counter-processor-0.1.04``.
+* Download :download:`counter-processor-config.yaml <../_static/admin/counter-processor-config.yaml>` to ``/usr/local/counter-processor-1.05``.
 
 * Edit the config file and pay particular attention to the FIXME lines.
 
@@ -99,7 +99,7 @@ Soon we will be setting up a cron job to run nightly but we start with a single
 
 * Change to the directory where you installed Counter Processor.
 
-  * ``cd /usr/local/counter-processor-0.1.04``
+  * ``cd /usr/local/counter-processor-1.05``
 
 * If you are running Counter Processor for the first time in the middle of a month, you will need create blank log files for the previous days. e.g.:
 
diff --git a/doc/sphinx-guides/source/api/client-libraries.rst b/doc/sphinx-guides/source/api/client-libraries.rst
index bd0aa55ba99..6279ea8329e 100755
--- a/doc/sphinx-guides/source/api/client-libraries.rst
+++ b/doc/sphinx-guides/source/api/client-libraries.rst
@@ -78,3 +78,10 @@ Ruby
 https://github.com/libis/dataverse_api is a Ruby gem for Dataverse APIs. It is registered as a library on Rubygems (https://rubygems.org/search?query=dataverse).
 
 The gem is created and maintained by the LIBIS team (https://www.libis.be) at the University of Leuven (https://www.kuleuven.be).
+
+Rust
+----
+
+https://github.com/gdcc/rust-dataverse
+
+The Rust Dataverse client is a comprehensive crate designed for seamless interaction with the Dataverse API. It facilitates essential operations such as collection, dataset, and file management. Additionally, the crate includes a user-friendly command-line interface (CLI) that brings the full functionality of the library to the command line. This project is actively maintained by `Jan Range <https://github.com/jr-1991>`_.
diff --git a/doc/sphinx-guides/source/api/native-api.rst b/doc/sphinx-guides/source/api/native-api.rst
index 75ee5a51f90..b16ea55bd25 100644
--- a/doc/sphinx-guides/source/api/native-api.rst
+++ b/doc/sphinx-guides/source/api/native-api.rst
@@ -66,6 +66,14 @@ The fully expanded example above (without environment variables) looks like this
 
 You should expect an HTTP 200 response and JSON beginning with "status":"OK" followed by a representation of the newly-created Dataverse collection.
 
+The request JSON supports an optional ``metadataBlocks`` object, with the following supported sub-objects:
+
+- ``metadataBlockNames``: The names of the metadata blocks you want to add to the Dataverse collection.
+- ``inputLevels``: The names of the fields in each metadata block for which you want to add a custom configuration regarding their inclusion or requirement when creating and editing datasets in the new Dataverse collection. Note that if the corresponding metadata block names are not specified in the ``metadataBlockNames`` field, they will be added automatically to the Dataverse collection.
+- ``facetIds``: The names of the fields to use as facets for browsing datasets and collections in the new Dataverse collection. Note that the order of the facets is defined by their order in the provided JSON array.
+
+To obtain an example of how these objects are included in the JSON file, download the :download:`dataverse-complete-optional-params.json <../_static/api/dataverse-complete-optional-params.json>` file and modify it to suit your needs.
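+
+For example, assuming the downloaded file is used as-is (and that the parent collection alias is in ``$PARENT``), the collection can be created with the same creation endpoint shown above:
+
+.. code-block:: bash
+
+  curl -H "X-Dataverse-key:$API_TOKEN" -X POST "$SERVER_URL/api/dataverses/$PARENT" --upload-file dataverse-complete-optional-params.json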
+
 .. _view-dataverse:
 
 View a Dataverse Collection
@@ -216,6 +224,22 @@ The fully expanded example above (without environment variables) looks like this
 
   curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" "https://demo.dataverse.org/api/dataverses/root/facets"
 
+By default, this endpoint returns an array of facet names. If more detailed information is needed, you can set the query parameter ``returnDetails`` to ``true``, which returns the display name and id in addition to the name of each facet:
+
+.. code-block:: bash
+
+  export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx
+  export SERVER_URL=https://demo.dataverse.org
+  export ID=root
+
+  curl -H "X-Dataverse-key:$API_TOKEN" "$SERVER_URL/api/dataverses/$ID/facets?returnDetails=true"
+
+The fully expanded example above (without environment variables) looks like this:
+
+.. code-block:: bash
+
+  curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" "https://demo.dataverse.org/api/dataverses/root/facets?returnDetails=true"
+
 Set Facets for a Dataverse Collection
 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
 
@@ -258,6 +282,25 @@ The fully expanded example above (without environment variables) looks like this
 
   curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" "https://demo.dataverse.org/api/dataverses/root/metadatablockfacets"
 
+List Field Type Input Levels Configured for a Dataverse Collection
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+|CORS| List the dataverse field type input levels configured for a given Dataverse collection ``id``:
+
+.. code-block:: bash
+
+  export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx
+  export SERVER_URL=https://demo.dataverse.org
+  export ID=root
+
+  curl -H "X-Dataverse-key:$API_TOKEN" "$SERVER_URL/api/dataverses/$ID/inputLevels"
+
+The fully expanded example above (without environment variables) looks like this:
+
+.. code-block:: bash
+
+  curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" "https://demo.dataverse.org/api/dataverses/root/inputLevels"
+
 Set Metadata Block Facets for a Dataverse Collection
 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
 
@@ -539,9 +582,7 @@ The fully expanded example above (without environment variables) looks like this
 Retrieve a Dataset JSON Schema for a Collection
 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
 
-Retrieves a JSON schema customized for a given collection in order to validate a dataset JSON file prior to creating the dataset. This
-first version of the schema only includes required elements and fields. In the future we plan to improve the schema by adding controlled
-vocabulary and more robust dataset field format testing:
+Retrieves a JSON schema customized for a given collection in order to validate a dataset JSON file prior to creating the dataset:
 
 .. code-block:: bash
 
@@ -566,8 +607,22 @@ While it is recommended to download a copy of the JSON Schema from the collectio
 Validate Dataset JSON File for a Collection
 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
 
-Validates a dataset JSON file customized for a given collection prior to creating the dataset. The validation only tests for json formatting
-and the presence of required elements:
+Validates a dataset JSON file customized for a given collection prior to creating the dataset.
+
+The validation tests for:
+
+- JSON formatting
+- the presence of required fields
+- typeClass rules:
+
+  - if ``multiple`` = true, the value must be a list
+  - if typeClass = ``primitive``, the value must be a String or a List of Strings, depending on the ``multiple`` flag
+  - if typeClass = ``compound``, the value must be a FieldDTO or a List of FieldDTOs, depending on the ``multiple`` flag
+  - if typeClass = ``controlledVocabulary``, the values are checked against the list of allowed values stored in the database
+
+- typeName validations (child objects with their required and allowed typeNames are configured automatically from the database schema). Examples include:
+
+  - dsDescription validation includes checks for the typeNames ``dsDescriptionValue`` (required) and ``dsDescriptionDate`` (optional)
+  - datasetContact validation includes checks for the typeName ``datasetContactName`` (required), plus ``datasetContactEmail`` and ``datasetContactAffiliation`` (optional)
 
 .. code-block:: bash
 
@@ -651,6 +706,29 @@ The fully expanded example above (without environment variables) looks like this
 
 Note: You must have "Edit Dataverse" permission in the given Dataverse to invoke this endpoint.
 
+Get User Permissions on a Dataverse
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+This API call returns the permissions that the calling user has on a particular dataverse.
+
+In particular, the user permissions that this API call checks, returned as booleans, are the following:
+
+* Can add a dataverse
+* Can add a dataset
+* Can view the unpublished dataverse
+* Can edit the dataverse
+* Can manage the dataverse permissions
+* Can publish the dataverse
+* Can delete the dataverse
+
+.. code-block:: bash
+
+  export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx
+  export SERVER_URL=https://demo.dataverse.org
+  export ID=root
+
+  curl -H "X-Dataverse-key: $API_TOKEN" -X GET "$SERVER_URL/api/dataverses/$ID/userPermissions"
+
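+The fully expanded example above (without environment variables) looks like this:
+
+.. code-block:: bash
+
+  curl -H "X-Dataverse-key: xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X GET "https://demo.dataverse.org/api/dataverses/root/userPermissions"
+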
 .. _create-dataset-command: 
 
 Create a Dataset in a Dataverse Collection
@@ -1191,16 +1269,19 @@ The fully expanded example above (without environment variables) looks like this
 
 .. note:: Supported exporters (export formats) are ``ddi``, ``oai_ddi``, ``dcterms``, ``oai_dc``, ``schema.org`` , ``OAI_ORE`` , ``Datacite``, ``oai_datacite`` and ``dataverse_json``. Descriptive names can be found under :ref:`metadata-export-formats` in the User Guide.
 
+.. note:: Additional exporters can be enabled, as described under :ref:`external-exporters` in the Installation Guide. To discover the machine-readable name of each exporter (e.g. ``ddi``), check :ref:`inventory-of-external-exporters` or ``getFormatName`` in the exporter's source code.
 
 Schema.org JSON-LD
 ^^^^^^^^^^^^^^^^^^
 
-Please note that the ``schema.org`` format has changed in backwards-incompatible ways after Dataverse Software version 4.9.4:
+Please note that the ``schema.org`` format has changed in backwards-incompatible ways after Dataverse 4.9.4:
 
 - "description" was a single string and now it is an array of strings.
 - "citation" was an array of strings and now it is an array of objects.
 
-Both forms are valid according to Google's Structured Data Testing Tool at https://search.google.com/structured-data/testing-tool . (This tool will report "The property affiliation is not recognized by Google for an object of type Thing" and this known issue is being tracked at https://github.com/IQSS/dataverse/issues/5029 .) Schema.org JSON-LD is an evolving standard that permits a great deal of flexibility. For example, https://schema.org/docs/gs.html#schemaorg_expected indicates that even when objects are expected, it's ok to just use text. As with all metadata export formats, we will try to keep the Schema.org JSON-LD format your Dataverse installation emits backward-compatible to made integrations more stable, despite the flexibility that's afforded by the standard.
+Both forms are valid according to Google's Structured Data Testing Tool at https://search.google.com/structured-data/testing-tool . Schema.org JSON-LD is an evolving standard that permits a great deal of flexibility. For example, https://schema.org/docs/gs.html#schemaorg_expected indicates that even when objects are expected, it's ok to just use text. As with all metadata export formats, we will try to keep the Schema.org JSON-LD format backward-compatible to make integrations more stable, despite the flexibility that's afforded by the standard.
+
+The standard has further evolved into a format called Croissant. For details, see :ref:`schema.org-head` in the Admin Guide.
 
 List Files in a Dataset
 ~~~~~~~~~~~~~~~~~~~~~~~
@@ -4652,6 +4733,28 @@ The fully expanded example above (without environment variables) looks like this
 
   curl "https://demo.dataverse.org/api/metadatablocks/citation"
 
+.. _dataset-fields-api:
+
+Dataset Fields
+--------------
+
+List All Facetable Dataset Fields
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+List all facetable dataset fields defined in the installation.
+
+.. code-block:: bash
+
+  export SERVER_URL=https://demo.dataverse.org
+
+  curl "$SERVER_URL/api/datasetfields/facetables"
+
+The fully expanded example above (without environment variables) looks like this:
+
+.. code-block:: bash
+
+  curl "https://demo.dataverse.org/api/datasetfields/facetables"
+
 .. _Notifications:
 
 Notifications
@@ -5068,7 +5171,7 @@ The fully expanded example above (without environment variables) looks like this
 Reserve a PID
 ~~~~~~~~~~~~~
 
-Reserved a PID for a dataset. A superuser API token is required.
+Reserve a PID for a dataset if not yet registered, and, if FilePIDs are enabled, reserve any file PIDs that are not yet registered. A superuser API token is required.
 
 .. note:: See :ref:`curl-examples-and-environment-variables` if you are unfamiliar with the use of export below.
 
@@ -5749,7 +5852,8 @@ The ``$identifier`` should start with an ``@`` if it's a user. Groups start with
 Saved Search
 ~~~~~~~~~~~~
 
-The Saved Search, Linked Dataverses, and Linked Datasets features shipped with Dataverse 4.0, but as a "`superuser-only <https://github.com/IQSS/dataverse/issues/90#issuecomment-86094663>`_" because they are **experimental** (see `#1364 <https://github.com/IQSS/dataverse/issues/1364>`_, `#1813 <https://github.com/IQSS/dataverse/issues/1813>`_, `#1840 <https://github.com/IQSS/dataverse/issues/1840>`_, `#1890 <https://github.com/IQSS/dataverse/issues/1890>`_, `#1939 <https://github.com/IQSS/dataverse/issues/1939>`_, `#2167 <https://github.com/IQSS/dataverse/issues/2167>`_, `#2186 <https://github.com/IQSS/dataverse/issues/2186>`_, `#2053 <https://github.com/IQSS/dataverse/issues/2053>`_, and `#2543 <https://github.com/IQSS/dataverse/issues/2543>`_). The following API endpoints were added to help people with access to the "admin" API make use of these features in their current form. There is a known issue (`#1364 <https://github.com/IQSS/dataverse/issues/1364>`_) that once a link to a Dataverse collection or dataset is created, it cannot be removed (apart from database manipulation and reindexing) which is why a ``DELETE`` endpoint for saved searches is neither documented nor functional. The Linked Dataverse collections feature is `powered by Saved Search <https://github.com/IQSS/dataverse/issues/1852>`_ and therefore requires that the "makelinks" endpoint be executed on a periodic basis as well.
+The Saved Search, Linked Dataverses, and Linked Datasets features are accessible to superusers only, except for linking a dataset. The following API endpoints were added to help people with access to the "admin" API make use of these features in their current form. Keep in mind that they are partially experimental.
+The update of all saved searches is run by a timer once a week (see :ref:`saved-search-timer`), so if you have just created a saved search, you can manually run the ``makelinks`` endpoint, which will find new dataverses and datasets that match the saved search and then link the search results to the dataverse in which the saved search is defined.
 
 List all saved searches. ::
 
@@ -5759,6 +5863,12 @@ List a saved search by database id. ::
 
   GET http://$SERVER/api/admin/savedsearches/$id
 
+Delete a saved search by database id.
+
+The ``unlink=true`` query parameter unlinks all links (linked datasets or Dataverse collections) associated with the deleted saved search. Use of this parameter should be carefully considered, as you cannot know whether the links were created manually or by the saved search. After deleting a saved search with ``unlink=true``, we recommend running ``/makelinks/all`` just in case a dataset was also linked by another saved search. (Saved searches can link the same dataset.) Reindexing might be necessary as well. ::
+
+  DELETE http://$SERVER/api/admin/savedsearches/$id?unlink=true
+
 Execute a saved search by database id and make links to Dataverse collections and datasets that are found. The JSON response indicates which Dataverse collections and datasets were newly linked versus already linked. The ``debug=true`` query parameter adds to the JSON response extra information about the saved search being executed (which you could also get by listing the saved search). ::
 
   PUT http://$SERVER/api/admin/savedsearches/makelinks/$id?debug=true
diff --git a/doc/sphinx-guides/source/api/search.rst b/doc/sphinx-guides/source/api/search.rst
index e8d0a0b3ea7..297f1283ef7 100755
--- a/doc/sphinx-guides/source/api/search.rst
+++ b/doc/sphinx-guides/source/api/search.rst
@@ -114,6 +114,9 @@ https://demo.dataverse.org/api/search?q=trees
                     "identifier_of_dataverse":"dvbe69f5e1",
                     "name_of_dataverse":"dvbe69f5e1",
                     "citation":"Finch, Fiona; Spruce, Sabrina; Poe, Edgar Allen; Mulligan, Hercules, 2019, \"Darwin's Finches\", https://doi.org/10.70122/FK2/MB5VGR, Root, V3",
+                    "publicationStatuses": [
+                        "Published"
+                    ],
                     "storageIdentifier":"file://10.70122/FK2/MB5VGR",
                     "subjects":[  
                        "Astronomy and Astrophysics",
@@ -207,6 +210,9 @@ In this example, ``show_relevance=true`` matches per field are shown. Available
                     "published_at":"2016-05-10T12:57:45Z",
                     "citationHtml":"Finch, Fiona, 2016, \"Darwin's Finches\", <a href=\"http://dx.doi.org/10.5072/FK2/G2VPE7\" target=\"_blank\">http://dx.doi.org/10.5072/FK2/G2VPE7</a>, Root Dataverse, V1",
                     "citation":"Finch, Fiona, 2016, \"Darwin's Finches\", http://dx.doi.org/10.5072/FK2/G2VPE7, Root Dataverse, V1",
+                    "publicationStatuses": [
+                        "Published"
+                    ],
                     "matches":[
                         {
                             "authorName":{
@@ -297,6 +303,9 @@ The above example ``fq=publicationStatus:Published`` retrieves only "RELEASED" v
                     "identifier_of_dataverse": "rahman",
                     "name_of_dataverse": "mdmizanur rahman Dataverse collection",
                     "citation": "Finch, Fiona, 2019, \"Darwin's Finches\", https://doi.org/10.70122/FK2/GUAS41, Demo Dataverse, V1",
+                    "publicationStatuses": [
+                        "Published"
+                    ],
                     "storageIdentifier": "file://10.70122/FK2/GUAS41",
                     "subjects": [
                         "Medicine, Health and Life Sciences"
@@ -330,6 +339,9 @@ The above example ``fq=publicationStatus:Published`` retrieves only "RELEASED" v
                     "identifier_of_dataverse": "demo",
                     "name_of_dataverse": "Demo Dataverse",
                     "citation": "Finch, Fiona, 2020, \"Darwin's Finches\", https://doi.org/10.70122/FK2/7ZXYRH, Demo Dataverse, V1",
+                    "publicationStatuses": [
+                        "Published"
+                    ],
                     "storageIdentifier": "file://10.70122/FK2/7ZXYRH",
                     "subjects": [
                         "Medicine, Health and Life Sciences"
@@ -386,6 +398,10 @@ The above example ``metadata_fields=citation:*`` returns under "metadataBlocks"
                     "identifier_of_dataverse": "Sample_data",
                     "name_of_dataverse": "Sample Data",
                     "citation": "MĂ©tropole, 2021, \"JDD avec GeoJson 2021-07-13T10:23:46.409Z\", https://doi.org/10.5072/FK2/GIWCKB, Root, DRAFT VERSION",
+                    "publicationStatuses": [
+                        "Unpublished",
+                        "Draft"
+                    ],
                     "storageIdentifier": "file://10.5072/FK2/GIWCKB",
                     "subjects": [
                         "Other"
diff --git a/doc/sphinx-guides/source/container/base-image.rst b/doc/sphinx-guides/source/container/base-image.rst
index c41250d48c5..0005265fb1c 100644
--- a/doc/sphinx-guides/source/container/base-image.rst
+++ b/doc/sphinx-guides/source/container/base-image.rst
@@ -46,7 +46,7 @@ The base image provides:
 - CLI tools necessary to run Dataverse (i. e. ``curl`` or ``jq`` - see also :doc:`../installation/prerequisites` in Installation Guide)
 - Linux tools for analysis, monitoring and so on
 - `Jattach <https://github.com/apangin/jattach>`__ (attach to running JVM)
-- `wait-for <https://github.com/eficode/wait-for>`__ (tool to "wait for" a service to be available)
+- `wait4x <https://github.com/atkrad/wait4x>`__ (tool to "wait for" a service to be available)
 - `dumb-init <https://github.com/Yelp/dumb-init>`__ (see :ref:`below <base-entrypoint>` for details)
 
 This image is created as a "multi-arch image", see :ref:`below <base-multiarch>`.
@@ -85,7 +85,7 @@ Some additional notes, using Maven parameters to change the build and use ...:
     (See also `Docker Hub search example <https://hub.docker.com/_/eclipse-temurin/tags?page=1&name=11-jre>`_)
 - ... a different Java Distribution: add ``-Djava.image="name:tag"`` with precise reference to an
   image available local or remote.
-- ... a different UID/GID for the ``payara`` user/group: add ``-Dbase.image.uid=1234`` (or ``.gid``)
+- ... a different UID/GID for the ``payara`` user/group (default ``1000:1000``): add ``-Dbase.image.uid=1234`` (or ``.gid``)
 
 Automated Builds & Publishing
 ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
@@ -151,12 +151,12 @@ provides. These are mostly based on environment variables (very common with cont
       - [preboot]_
       - Abs. path
       - Provide path to file with ``asadmin`` commands to run **before** boot of application server.
-        See also `Pre/postboot script docs`_.
+        See also `Pre/postboot script docs`_. Must be writable by the Payara Linux user!
     * - ``POSTBOOT_COMMANDS``
       - [postboot]_
       - Abs. path
       - Provide path to file with ``asadmin`` commands to run **after** boot of application server.
-        See also `Pre/postboot script docs`_.
+        See also `Pre/postboot script docs`_. Must be writable by the Payara Linux user!
     * - ``JVM_ARGS``
       - (empty)
       - String
@@ -231,6 +231,18 @@ provides. These are mostly based on environment variables (very common with cont
       - See :ref:`:ApplicationServerSettings` ``http.request-timeout-seconds``.
 
         *Note:* can also be set using any other `MicroProfile Config Sources`_ available via ``dataverse.http.timeout``.
+    * - ``PAYARA_ADMIN_PASSWORD``
+      - ``admin``
+      - String
+      - Set to a secret string to change the `Payara Admin Console`_ Administrator User ("admin") password.
+    * - ``LINUX_PASSWORD``
+      - ``payara``
+      - String
+      - Set to secret string to change the Payara Linux User ("payara", default UID=1000) password.
+    * - ``DOMAIN_PASSWORD``
+      - ``changeit``
+      - String
+      - Set to secret string to change the `Domain Master Password`_.
 
 
 .. [preboot] ``${CONFIG_DIR}/pre-boot-commands.asadmin``
@@ -374,3 +386,5 @@ from `run-java-sh recommendations`_.
 .. _Pre/postboot script docs: https://docs.payara.fish/community/docs/Technical%20Documentation/Payara%20Micro%20Documentation/Payara%20Micro%20Configuration%20and%20Management/Micro%20Management/Asadmin%20Commands/Pre%20and%20Post%20Boot%20Commands.html
 .. _MicroProfile Config Sources: https://docs.payara.fish/community/docs/Technical%20Documentation/MicroProfile/Config/Overview.html
 .. _run-java-sh recommendations: https://github.com/fabric8io-images/run-java-sh/blob/master/TUNING.md#recommandations
+.. _Domain Master Password: https://docs.payara.fish/community/docs/Technical%20Documentation/Payara%20Server%20Documentation/Security%20Guide/Administering%20System%20Security.html#to-change-the-master-password
+.. _Payara Admin Console: https://docs.payara.fish/community/docs/Technical%20Documentation/Payara%20Server%20Documentation/General%20Administration/Overview.html#administration-console
\ No newline at end of file
diff --git a/doc/sphinx-guides/source/container/dev-usage.rst b/doc/sphinx-guides/source/container/dev-usage.rst
index a28757165c5..6a1edcf7ebd 100644
--- a/doc/sphinx-guides/source/container/dev-usage.rst
+++ b/doc/sphinx-guides/source/container/dev-usage.rst
@@ -341,6 +341,11 @@ The steps below describe options to enable the later in different IDEs.
 
     **IMPORTANT**: This tool uses a Bash shell script and is thus limited to Mac and Linux OS.
 
+Exploring the Database
+----------------------
+
+See :ref:`db-name-creds` in the Developer Guide.
+
 Using a Debugger
 ----------------
 
diff --git a/doc/sphinx-guides/source/container/running/demo.rst b/doc/sphinx-guides/source/container/running/demo.rst
index 0508639c616..f9642347558 100644
--- a/doc/sphinx-guides/source/container/running/demo.rst
+++ b/doc/sphinx-guides/source/container/running/demo.rst
@@ -124,8 +124,6 @@ Some JVM options can be configured as environment variables. For example, you ca
 
 We are in the process of making more JVM options configurable as environment variables. Look for the term "MicroProfile Config" in under :doc:`/installation/config` in the Installation Guide to know if you can use them this way.
 
-Please note that for a few environment variables (the ones that start with ``%ct`` in :download:`microprofile-config.properties <../../../../../src/main/resources/META-INF/microprofile-config.properties>`), you have to prepend ``_CT_`` to make, for example, ``_CT_DATAVERSE_SITEURL``. We are working on a fix for this in https://github.com/IQSS/dataverse/issues/10285.
-
 There is a final way to configure JVM options that we plan to deprecate once all JVM options have been converted to MicroProfile Config. Look for "magic trick" under "tunables" at :doc:`../app-image` for more information.
 
 Database Settings
diff --git a/doc/sphinx-guides/source/contributor/code.md b/doc/sphinx-guides/source/contributor/code.md
index 2a1dec08c05..c7154d14169 100644
--- a/doc/sphinx-guides/source/contributor/code.md
+++ b/doc/sphinx-guides/source/contributor/code.md
@@ -20,6 +20,7 @@ The primary codebase and issue tracker for Dataverse is <https://github.com/IQSS
 - <https://github.com/IQSS/dataverse-client-javascript> (TypeScript)
 - <https://github.com/gdcc/dataverse-previewers> (Javascript)
 - <https://github.com/gdcc/pyDataverse> (Python)
+- <https://github.com/gdcc/rust-dataverse> (Rust)
 - <https://github.com/gdcc/dataverse-ansible> (Ansible)
 - <https://github.com/gdcc/dv-metrics> (Javascript)
 
diff --git a/doc/sphinx-guides/source/contributor/documentation.md b/doc/sphinx-guides/source/contributor/documentation.md
index 12a4266c9ff..96277c3b373 100644
--- a/doc/sphinx-guides/source/contributor/documentation.md
+++ b/doc/sphinx-guides/source/contributor/documentation.md
@@ -74,8 +74,10 @@ In some parts of the documentation, graphs are rendered as images using the Sphi
 
 Building the guides requires the ``dot`` executable from GraphViz.
 
-This requires having `GraphViz <https://graphviz.org>`_ installed and either having ``dot`` on the path or
-`adding options to the make call <https://groups.google.com/forum/#!topic/sphinx-users/yXgNey_0M3I>`_.
+This requires having [GraphViz](https://graphviz.org) installed and either having ``dot`` on the path or
+[adding options to the `make` call](https://groups.google.com/forum/#!topic/sphinx-users/yXgNey_0M3I).
+
+On a Mac we recommend installing GraphViz through [Homebrew](<https://brew.sh>). Once you have Homebrew installed and configured to work with your shell, you can type `brew install graphviz`.
 
 ### Editing and Building the Guides
 
@@ -129,6 +131,7 @@ Please observe the following when writing documentation:
 - Use American English spelling.
 - Use examples when possible.
 - Break up longer paragraphs.
+- Use Title Case in Headings.
 - Use "double quotes" instead of 'single quotes'.
 - Favor "and" (data and code) over slashes (data/code).
 
@@ -148,6 +151,18 @@ If the page is written in Markdown (.md), use this form:
     :depth: 3
     ```
 
+### Links
+
+Getting links right with .rst files can be tricky.
+
+#### Custom Titles
+
+You can use a custom title when linking to a document like this:
+
+    :doc:`Custom title </api/intro>`
+
+See also <https://docs.readthedocs.io/en/stable/guides/cross-referencing-with-sphinx.html#the-doc-role>
+
 ### Images
 
 A good documentation is just like a website enhanced and upgraded by adding high quality and self-explanatory images.  Often images depict a lot of written text in a simple manner. Within our Sphinx docs, you can add them in two ways: a) add a PNG image directly and include or b) use inline description languages like GraphViz (current only option).
diff --git a/doc/sphinx-guides/source/contributor/index.md b/doc/sphinx-guides/source/contributor/index.md
index e75cc58bccd..1017f15f0ed 100644
--- a/doc/sphinx-guides/source/contributor/index.md
+++ b/doc/sphinx-guides/source/contributor/index.md
@@ -43,7 +43,7 @@ If you speak multiple languages, you are very welcome to help us translate Datav
 
 ## Code
 
-Dataverse is open source and we love code contributions. Developers are not limited to the main Dataverse code in this git repo. We have projects in C, C++, Go, Java, Javascript, Julia, PHP, Python, R, Ruby, TypeScript and more. To get started, please see the following pages:
+Dataverse is open source and we love code contributions. Developers are not limited to the main Dataverse code in this git repo. We have projects in C, C++, Go, Java, Javascript, Julia, PHP, Python, R, Ruby, Rust, TypeScript and more. To get started, please see the following pages:
 
 ```{toctree}
 :maxdepth: 1
diff --git a/doc/sphinx-guides/source/developers/dev-environment.rst b/doc/sphinx-guides/source/developers/dev-environment.rst
index 2837f901d5e..f0d1b116ea7 100755
--- a/doc/sphinx-guides/source/developers/dev-environment.rst
+++ b/doc/sphinx-guides/source/developers/dev-environment.rst
@@ -32,14 +32,20 @@ Install Java
 
 The Dataverse Software requires Java 17.
 
-On Mac and Windows, we suggest downloading OpenJDK from https://adoptium.net (formerly `AdoptOpenJDK <https://adoptopenjdk.net>`_) or `SDKMAN <https://sdkman.io>`_.
+On Mac and Windows, we suggest using `SDKMAN <https://sdkman.io>`_ to install Temurin (Eclipse's name for its OpenJDK distribution). Type ``sdk install java 17``, hit the "tab" key until you get to a version that ends with ``-tem``, and then hit enter.
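+
+For example (the exact Temurin identifier will vary; use whatever SDKMAN suggests via tab completion):
+
+.. code-block:: bash
+
+  sdk install java 17.0.11-tem
+  java -version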
+
+Alternatively you can download Temurin from https://adoptium.net (formerly `AdoptOpenJDK <https://adoptopenjdk.net>`_).
 
 On Linux, you are welcome to use the OpenJDK available from package managers.
 
 Install Maven
 ~~~~~~~~~~~~~
 
-Follow instructions at https://maven.apache.org
+If you are using SDKMAN, run this command:
+
+``sdk install maven``
+
+Otherwise, follow instructions at https://maven.apache.org.
 
 Install and Start Docker
 ~~~~~~~~~~~~~~~~~~~~~~~~
diff --git a/doc/sphinx-guides/source/developers/make-data-count.rst b/doc/sphinx-guides/source/developers/make-data-count.rst
index edad580e451..f347e7b8ff9 100644
--- a/doc/sphinx-guides/source/developers/make-data-count.rst
+++ b/doc/sphinx-guides/source/developers/make-data-count.rst
@@ -1,7 +1,7 @@
 Make Data Count
 ===============
 
-Support for Make Data Count is a feature of the Dataverse Software that is described in the :doc:`/admin/make-data-count` section of the Admin Guide. In order for developers to work on the feature, they must install Counter Processor, a Python 3 application, as described below. Counter Processor can be found at https://github.com/CDLUC3/counter-processor
+Support for Make Data Count is a feature of the Dataverse Software that is described in the :doc:`/admin/make-data-count` section of the Admin Guide. In order for developers to work on the feature, they must install Counter Processor, a Python 3 application, as described below. Counter Processor can be found at https://github.com/gdcc/counter-processor
 
 .. contents:: |toctitle|
         :local:
@@ -49,7 +49,7 @@ Once you are done with your configuration, you can run Counter Processor like th
 
 ``su - counter``
 
-``cd /usr/local/counter-processor-0.1.04``
+``cd /usr/local/counter-processor-1.05``
 
 ``CONFIG_FILE=counter-processor-config.yaml python39 main.py``
 
@@ -82,7 +82,7 @@ Second, if you are also sending your SUSHI report to Make Data Count, you will n
 
 ``curl -H "Authorization: Bearer $JSON_WEB_TOKEN" -X DELETE https://$MDC_SERVER/reports/$REPORT_ID``
 
-To get the ``REPORT_ID``, look at the logs generated in ``/usr/local/counter-processor-0.1.04/tmp/datacite_response_body.txt``
+To get the ``REPORT_ID``, look at the logs generated in ``/usr/local/counter-processor-1.05/tmp/datacite_response_body.txt``
 
 To read more about the Make Data Count api, see https://github.com/datacite/sashimi
 
diff --git a/doc/sphinx-guides/source/developers/making-library-releases.rst b/doc/sphinx-guides/source/developers/making-library-releases.rst
index 63b6eeb1c2a..be867f9196a 100755
--- a/doc/sphinx-guides/source/developers/making-library-releases.rst
+++ b/doc/sphinx-guides/source/developers/making-library-releases.rst
@@ -69,6 +69,8 @@ These examples from the SWORD library. Below is what to expect from the interact
         What is the new development version for "SWORD v2 Common Server Library (forked)"? (sword2-server) 2.0.1-SNAPSHOT: :
         [INFO] 8/17 prepare:rewrite-poms-for-release
 
+Note that a commit or two will be made and pushed, but if you run ``git status`` you will see that your local branch is behind by that number of commits. To fix this, just run ``git pull``.
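+
+For example:
+
+.. code-block:: bash
+
+    git status   # reports something like "Your branch is behind 'origin/main' by 2 commits"
+    git pull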
+
 It can take some time for the jar to be visible on Maven Central. You can start by looking on the repo1 server, like this: https://repo1.maven.org/maven2/io/gdcc/sword2-server/2.0.0/
 
 Don't bother putting the new version in a pom.xml until you see it on repo1.
@@ -80,14 +82,65 @@ Releasing a New Library to Maven Central
 
 At a high level:
 
+- Start with a snapshot release.
 - Use an existing pom.xml as a starting point.
 - Use existing GitHub Actions workflows as a starting point.
 - Create secrets in the new library's GitHub repo used by the workflow.
 - If you need an entire new namespace, look at previous issues such as https://issues.sonatype.org/browse/OSSRH-94575 and https://issues.sonatype.org/browse/OSSRH-94577
 
+Updating pom.xml for a Snapshot Release
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+Before publishing a final version to Maven Central, you should publish a snapshot release or two. Each snapshot release you publish gets a unique jar name (e.g. ``foobar-0.0.1-20240430.175110-3.jar``), so you can safely publish over and over with the same version number.
+
+We use the `Nexus Staging Maven Plugin <https://github.com/sonatype/nexus-maven-plugins/blob/main/staging/maven-plugin/README.md>`_ to push snapshot releases to https://s01.oss.sonatype.org/content/groups/staging/io/gdcc/ and https://s01.oss.sonatype.org/content/groups/staging/org/dataverse/
+
+Add the following to your pom.xml:
+
+.. code-block:: xml
+
+    <version>0.0.1-SNAPSHOT</version>
+
+    <distributionManagement>
+        <snapshotRepository>
+            <id>ossrh</id>
+            <url>https://s01.oss.sonatype.org/content/repositories/snapshots</url>
+        </snapshotRepository>
+        <repository>
+            <id>ossrh</id>
+            <url>https://s01.oss.sonatype.org/service/local/staging/deploy/maven2/</url>
+        </repository>
+    </distributionManagement>
+
+    <plugin>
+        <groupId>org.sonatype.plugins</groupId>
+        <artifactId>nexus-staging-maven-plugin</artifactId>
+        <version>${nexus-staging.version}</version>
+        <extensions>true</extensions>
+        <configuration>
+            <serverId>ossrh</serverId>
+            <nexusUrl>https://s01.oss.sonatype.org</nexusUrl>
+            <autoReleaseAfterClose>true</autoReleaseAfterClose>
+        </configuration>
+    </plugin>
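+
+With the plugin and ``distributionManagement`` in place, publishing a snapshot is typically just a matter of running the following (assuming GPG signing and your ``ossrh`` credentials are already configured in ``settings.xml`` or the GitHub Actions workflow):
+
+.. code-block:: bash
+
+    mvn clean deploy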
+
+Configuring Secrets
+~~~~~~~~~~~~~~~~~~~
+
+In GitHub, you will likely need to configure the following secrets:
+
+- DATAVERSEBOT_GPG_KEY
+- DATAVERSEBOT_GPG_PASSWORD
+- DATAVERSEBOT_SONATYPE_TOKEN
+- DATAVERSEBOT_SONATYPE_USERNAME
+
+Note that some of these secrets might be configured at the org level (e.g. gdcc or IQSS).
+
+Many of the automated tasks are performed by the dataversebot account on GitHub: https://github.com/dataversebot
+
 npm (JavaScript/TypeScript)
 ---------------------------
 
 Currently, publishing `@iqss/dataverse-design-system <https://www.npmjs.com/package/@iqss/dataverse-design-system>`_ to npm done manually. We plan to automate this as part of https://github.com/IQSS/dataverse-frontend/issues/140
 
-https://www.npmjs.com/package/js-dataverse is the previous 1.0 version of js-dataverse. No 1.x releases are planned. We plan to publish 2.0 (used by the new frontend) as discussed in https://github.com/IQSS/dataverse-frontend/issues/13
\ No newline at end of file
+https://www.npmjs.com/package/js-dataverse is the previous 1.0 version of js-dataverse. No 1.x releases are planned. We plan to publish 2.0 (used by the new frontend) as discussed in https://github.com/IQSS/dataverse-frontend/issues/13
diff --git a/doc/sphinx-guides/source/developers/making-releases.rst b/doc/sphinx-guides/source/developers/making-releases.rst
index e7a59910e56..e436ba9e9d2 100755
--- a/doc/sphinx-guides/source/developers/making-releases.rst
+++ b/doc/sphinx-guides/source/developers/making-releases.rst
@@ -8,12 +8,12 @@ Making Releases
 Introduction
 ------------
 
-Note: See :doc:`making-library-releases` for how to publish our libraries to Maven Central. 
-
-See :doc:`version-control` for background on our branching strategy.
+This document is about releasing the main Dataverse app (https://github.com/IQSS/dataverse). See :doc:`making-library-releases` for how to release our various libraries. Other projects have their own release documentation.
 
 The steps below describe making both regular releases and hotfix releases.
 
+Below you'll see branches like "develop" and "master" mentioned. For more on our branching strategy, see :doc:`version-control`.
+
 .. _write-release-notes:
 
 Write Release Notes
@@ -24,10 +24,10 @@ Developers express the need for an addition to release notes by creating a "rele
 The task at or near release time is to collect these snippets into a single file.
 
 - Create an issue in GitHub to track the work of creating release notes for the upcoming release.
-- Create a branch, add a .md file for the release (ex. 5.10.1 Release Notes) in ``/doc/release-notes`` and write the release notes, making sure to pull content from the release note snippets mentioned above.
-- Delete the release note snippets as the content is added to the main release notes file.
-- Include instructions to describe the steps required to upgrade the application from the previous version. These must be customized for release numbers and special circumstances such as changes to metadata blocks and infrastructure.
-- Take the release notes .md through the regular Code Review and QA process.
+- Create a branch, add a .md file for the release (ex. 5.10.1 Release Notes) in ``/doc/release-notes`` and write the release notes, making sure to pull content from the release note snippets mentioned above. Snippets may not include the issue or pull request number in the text, so be sure to copy the number from the snippet's filename into the final release notes.
+- Delete (``git rm``) the release note snippets as the content is added to the main release notes file.
+- Include instructions describing the steps required to upgrade the application from the previous version. These must be customized for release numbers and special circumstances such as changes to metadata blocks and infrastructure.
+- Take the release notes .md through the regular Code Review and QA process. That is, make a pull request.
 
 Create a GitHub Issue and Branch for the Release
 ------------------------------------------------
@@ -70,6 +70,13 @@ Once important tests have passed (compile, unit tests, etc.), merge the pull req
 
 If this is a hotfix release, skip this whole "merge develop to master" step (the "develop" branch is not involved until later).
 
+Add Milestone to Pull Requests and Issues
+-----------------------------------------
+
+Often someone is making sure that the proper milestone (e.g. 5.10.1) is applied to pull requests and issues, but sometimes this falls through the cracks.
+
+Check for merged pull requests that have no milestone by going to https://github.com/IQSS/dataverse/pulls and entering `is:pr is:merged no:milestone <https://github.com/IQSS/dataverse/pulls?q=is%3Apr+is%3Amerged+no%3Amilestone>`_ as a query. If you find any, add the milestone to the pull request and any issues it closes. This includes the "merge develop into master" pull request above.
+
 (Optional) Test Docker Images
 -----------------------------
 
@@ -106,7 +113,7 @@ Create a Draft Release on GitHub
 Go to https://github.com/IQSS/dataverse/releases/new to start creating a draft release.
 
 - Under "Choose a tag" you will be creating a new tag. Have it start with a "v" such as ``v5.10.1``. Click "Create new tag on publish".
-- Under "Target" go to "Recent Commits" and select the merge commit from when you merged ``develop`` into ``master`` above. This commit will appear in ``/api/info/version`` from a running installation.
+- Under "Target", choose "master". This commit will appear in ``/api/info/version`` from a running installation.
 - Under "Release title" use the same name as the tag such as ``v5.10.1``.
 - In the description, copy and paste the content from the release notes .md file created in the "Write Release Notes" steps above.
 - Click "Save draft" because we do not want to publish the release yet.
@@ -153,6 +160,7 @@ ssh into the dataverse-internal server and do the following:
 - ``mkdir target``
 - ``cp /tmp/dataverse-5.10.1.war target``
 - ``cd scripts/installer``
+- ``make clean``
 - ``make``
 
 A zip file called ``dvinstall.zip`` should be produced.
@@ -175,7 +183,7 @@ Upload the following artifacts to the draft release you created:
 Deploy on Demo
 --------------
 
-Now that you have the release ready to go, give it one final test by deploying it on https://demo.dataverse.org . Note that this is also an opportunity to re-test the upgrade checklist as described in the release note. 
+Now that you have the release ready to go, consider giving it one final test by deploying it on https://demo.dataverse.org. Note that this is also an opportunity to re-test the upgrade checklist as described in the release note.
 
 Publish the Release
 -------------------
@@ -194,7 +202,7 @@ ssh into the guides server and update the symlink to point to the latest release
   cd /var/www/html/en
   ln -s 5.10.1 latest
 
-
+This step could be done before publishing the release if you'd like to double check that links in the release notes work.
 
 Close Milestone on GitHub and Create a New One
 ----------------------------------------------
diff --git a/doc/sphinx-guides/source/developers/metadataexport.rst b/doc/sphinx-guides/source/developers/metadataexport.rst
index 7f7536fb7f8..63630b64c44 100644
--- a/doc/sphinx-guides/source/developers/metadataexport.rst
+++ b/doc/sphinx-guides/source/developers/metadataexport.rst
@@ -15,8 +15,11 @@ Dataverse instances.
 As of v5.14, Dataverse provides a mechanism for third-party developers to create new metadata Exporters than implement
 new metadata formats or that replace existing formats. All the necessary dependencies are packaged in an interface JAR file
 available from Maven Central. Developers can distribute their new Exporters as JAR files which can be dynamically loaded
-into Dataverse instances - see :ref:`external-exporters`. Developers are encouraged to make their Exporter code available
-via https://github.com/gdcc/dataverse-exporters (or minimally, to list their existence in the README there). 
+into Dataverse instances - see :ref:`external-exporters`. Developers are encouraged to work with the core Dataverse team
+(see :ref:`getting-help-developers`) to distribute these JAR files via Maven Central. See the
+`Croissant <https://central.sonatype.com/artifact/io.gdcc.export/croissant>`_ and
+`Debug <https://central.sonatype.com/artifact/io.gdcc.export/debug>`_ artifacts as examples. You may find other examples
+under :ref:`inventory-of-external-exporters` in the Installation Guide.
 
 Exporter Basics
 ---------------
@@ -63,7 +66,7 @@ If an Exporter cannot create a requested metadata format for some reason, it sho
 Building an Exporter
 --------------------
 
-The example at https://github.com/gdcc/dataverse-exporters provides a Maven pom.xml file suitable for building an Exporter JAR file and that repository provides additional development guidance.
+The examples at https://github.com/gdcc/exporter-croissant and https://github.com/gdcc/exporter-debug provide a Maven pom.xml file suitable for building an Exporter JAR file and those repositories provide additional development guidance.
 
 There are four dependencies needed to build an Exporter:
 
diff --git a/doc/sphinx-guides/source/developers/s3-direct-upload-api.rst b/doc/sphinx-guides/source/developers/s3-direct-upload-api.rst
index 33b8e434e6e..a8f87f13375 100644
--- a/doc/sphinx-guides/source/developers/s3-direct-upload-api.rst
+++ b/doc/sphinx-guides/source/developers/s3-direct-upload-api.rst
@@ -18,7 +18,7 @@ Direct upload involves a series of three activities, each involving interacting
 This API is only enabled when a Dataset is configured with a data store supporting direct S3 upload.
 Administrators should be aware that partial transfers, where a client starts uploading the file/parts of the file and does not contact the server to complete/cancel the transfer, will result in data stored in S3 that is not referenced in the Dataverse installation (e.g. should be considered temporary and deleted.)
 
- 
+
 Requesting Direct Upload of a DataFile
 --------------------------------------
 To initiate a transfer of a file to S3, make a call to the Dataverse installation indicating the size of the file to upload. The response will include a pre-signed URL(s) that allow the client to transfer the file. Pre-signed URLs include a short-lived token authorizing the action represented by the URL.
@@ -29,7 +29,7 @@ To initiate a transfer of a file to S3, make a call to the Dataverse installatio
   export SERVER_URL=https://demo.dataverse.org
   export PERSISTENT_IDENTIFIER=doi:10.5072/FK27U7YBV
   export SIZE=1000000000
- 
+
   curl -H "X-Dataverse-key:$API_TOKEN" "$SERVER_URL/api/datasets/:persistentId/uploadurls?persistentId=$PERSISTENT_IDENTIFIER&size=$SIZE"
 
 The response to this call, assuming direct uploads are enabled, will be one of two forms:
@@ -71,7 +71,12 @@ The call will return a 400 (BAD REQUEST) response if the file is larger than wha
 
 In the example responses above, the URLs, which are very long, have been omitted. These URLs reference the S3 server and the specific object identifier that will be used, starting with, for example, https://demo-dataverse-bucket.s3.amazonaws.com/10.5072/FK2FOQPJS/177883b000e-49cedef268ac?...
 
-The client must then use the URL(s) to PUT the file, or if the file is larger than the specified partSize, parts of the file. 
+.. _direct-upload-to-s3:
+
+Upload Files to S3
+------------------
+
+The client must then use the URL(s) to PUT the file, or if the file is larger than the specified partSize, parts of the file.
 
 In the single part case, only one call to the supplied URL is required:
 
@@ -88,21 +93,23 @@ Or, if you have disabled S3 tagging (see :ref:`s3-tagging`), you should omit the
 Note that without the ``-i`` flag, you should not expect any output from the command above. With the ``-i`` flag, you should expect to see a "200 OK" response.
 
 In the multipart case, the client must send each part and collect the 'eTag' responses from the server. The calls for this are the same as the one for the single part case except that each call should send a <partSize> slice of the total file, with the last part containing the remaining bytes.
-The responses from the S3 server for these calls will include the 'eTag' for the uploaded part. 
+The responses from the S3 server for these calls will include the 'eTag' for the uploaded part.
 
 To successfully conclude the multipart upload, the client must call the 'complete' URI, sending a json object including the part eTags:
 
 .. code-block:: bash
 
     curl -X PUT "$SERVER_URL/api/datasets/mpload?..." -d '{"1":"<eTag1 string>","2":"<eTag2 string>","3":"<eTag3 string>","4":"<eTag4 string>","5":"<eTag5 string>"}'
-  
+
 If the client is unable to complete the multipart upload, it should call the abort URL:
 
 .. code-block:: bash
-  
+
     curl -X DELETE "$SERVER_URL/api/datasets/mpload?..."
-   
-  
+
+.. note::
+    If you encounter an ``HTTP 501 Not Implemented`` error, ensure the ``Content-Length`` header is correctly set to the file or chunk size. This issue may arise when streaming files or chunks asynchronously to S3 via ``PUT`` requests, particularly if the library or tool you're using doesn't set the ``Content-Length`` header automatically.
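+
+    For example, one way to guarantee the header is set when using curl is to write each part to a file first so that curl can derive ``Content-Length`` from the file size (the part size and upload URL below are placeholders):
+
+    .. code-block:: bash
+
+        split -b 1073741824 file.bin part_
+        curl -i -X PUT -T part_aa "$PART_UPLOAD_URL"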
+
 .. _direct-add-to-dataset-api:
 
 Adding the Uploaded File to the Dataset
@@ -114,10 +121,10 @@ jsonData normally includes information such as a file description, tags, provena
 * "storageIdentifier" - String, as specified in prior calls
 * "fileName" - String
 * "mimeType" - String
-* fixity/checksum: either: 
+* fixity/checksum: either:
 
   * "md5Hash" - String with MD5 hash value, or
-  * "checksum" - Json Object with "@type" field specifying the algorithm used and "@value" field with the value from that algorithm, both Strings 
+  * "checksum" - Json Object with "@type" field specifying the algorithm used and "@value" field with the value from that algorithm, both Strings
 
 The allowed checksum algorithms are defined by the edu.harvard.iq.dataverse.DataFile.CheckSumType class and currently include MD5, SHA-1, SHA-256, and SHA-512
 
@@ -129,7 +136,7 @@ The allowed checksum algorithms are defined by the edu.harvard.iq.dataverse.Data
   export JSON_DATA="{'description':'My description.','directoryLabel':'data/subdir1','categories':['Data'], 'restrict':'false', 'storageIdentifier':'s3://demo-dataverse-bucket:176e28068b0-1c3f80357c42', 'fileName':'file1.txt', 'mimeType':'text/plain', 'checksum': {'@type': 'SHA-1', '@value': '123456'}}"
 
   curl -X POST -H "X-Dataverse-key: $API_TOKEN" "$SERVER_URL/api/datasets/:persistentId/add?persistentId=$PERSISTENT_IDENTIFIER" -F "jsonData=$JSON_DATA"
-  
+
 Note that this API call can be used independently of the others, e.g. supporting use cases in which the file already exists in S3/has been uploaded via some out-of-band method. Enabling out-of-band uploads is described at :ref:`file-storage` in the Configuration Guide.
 With current S3 stores the object identifier must be in the correct bucket for the store, include the PID authority/identifier of the parent dataset, and be guaranteed unique, and the supplied storage identifier must be prefaced with the store identifier used in the Dataverse installation, as with the internally generated examples above.
 
@@ -173,10 +180,10 @@ jsonData normally includes information such as a file description, tags, provena
 * "storageIdentifier" - String, as specified in prior calls
 * "fileName" - String
 * "mimeType" - String
-* fixity/checksum: either: 
+* fixity/checksum: either:
 
   * "md5Hash" - String with MD5 hash value, or
-  * "checksum" - Json Object with "@type" field specifying the algorithm used and "@value" field with the value from that algorithm, both Strings 
+  * "checksum" - Json Object with "@type" field specifying the algorithm used and "@value" field with the value from that algorithm, both Strings
 
 The allowed checksum algorithms are defined by the edu.harvard.iq.dataverse.DataFile.CheckSumType class and currently include MD5, SHA-1, SHA-256, and SHA-512.
 Note that the API call does not validate that the file matches the hash value supplied. If a Dataverse instance is configured to validate file fixity hashes at publication time, a mismatch would be caught at that time and cause publication to fail.
@@ -189,7 +196,7 @@ Note that the API call does not validate that the file matches the hash value su
   export JSON_DATA='{"description":"My description.","directoryLabel":"data/subdir1","categories":["Data"], "restrict":"false", "forceReplace":"true", "storageIdentifier":"s3://demo-dataverse-bucket:176e28068b0-1c3f80357c42", "fileName":"file1.txt", "mimeType":"text/plain", "checksum": {"@type": "SHA-1", "@value": "123456"}}'
 
   curl -X POST -H "X-Dataverse-key: $API_TOKEN" "$SERVER_URL/api/files/$FILE_IDENTIFIER/replace" -F "jsonData=$JSON_DATA"
-  
+
 Note that this API call can be used independently of the others, e.g. supporting use cases in which the file already exists in S3/has been uploaded via some out-of-band method. Enabling out-of-band uploads is described at :ref:`file-storage` in the Configuration Guide.
 With current S3 stores the object identifier must be in the correct bucket for the store, include the PID authority/identifier of the parent dataset, and be guaranteed unique, and the supplied storage identifier must be prefaced with the store identifier used in the Dataverse installation, as with the internally generated examples above.
 
diff --git a/doc/sphinx-guides/source/developers/tips.rst b/doc/sphinx-guides/source/developers/tips.rst
index 839ae3aa19d..f5ffbac0c07 100755
--- a/doc/sphinx-guides/source/developers/tips.rst
+++ b/doc/sphinx-guides/source/developers/tips.rst
@@ -94,23 +94,63 @@ Then configure the JVM option mentioned in :ref:`install-imagemagick` to the pat
 Database Schema Exploration
 ---------------------------
 
-With over 100 tables, the Dataverse Software PostgreSQL database ("dvndb") can be somewhat daunting for newcomers. Here are some tips for coming up to speed. (See also the :doc:`sql-upgrade-scripts` section.)
+With over 100 tables, the Dataverse PostgreSQL database can be somewhat daunting for newcomers. Here are some tips for coming up to speed. (See also the :doc:`sql-upgrade-scripts` section.)
+
+.. _db-name-creds:
+
+Database Name and Credentials
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+The default database name and credentials depend on how you set up your dev environment.
+
+.. list-table::
+   :header-rows: 1
+   :align: left
+
+   * - MPCONFIG Key
+     - Docker
+     - Classic
+   * - dataverse.db.name
+     - ``dataverse``
+     - ``dvndb``
+   * - dataverse.db.user
+     - ``dataverse``
+     - ``dvnapp``
+   * - dataverse.db.password
+     - ``secret``
+     - ``secret``
+
+Here's an example of using these credentials from within the PostgreSQL container (see :doc:`/container/index`):
+
+.. code-block:: bash
+
+    pdurbin@beamish dataverse % docker exec -it postgres-1 bash
+    root@postgres:/# export PGPASSWORD=secret
+    root@postgres:/# psql -h localhost -U dataverse dataverse
+    psql (16.3 (Debian 16.3-1.pgdg120+1))
+    Type "help" for help.
+    
+    dataverse=# select id,alias from dataverse limit 1;
+     id | alias 
+    ----+-------
+      1 | root
+    (1 row)
+
+See also :ref:`database-persistence` in the Installation Guide.
 
 pgAdmin
-~~~~~~~~
+~~~~~~~
 
-Back in the :doc:`classic-dev-env` section, we had you install pgAdmin, which can help you explore the tables and execute SQL commands. It's also listed in the :doc:`tools` section.
+If you followed the :doc:`classic-dev-env` section, you already installed pgAdmin, which can help you explore the tables and execute SQL commands. It's also listed in the :doc:`tools` section.
 
 SchemaSpy
 ~~~~~~~~~
 
 SchemaSpy is a tool that creates a website of entity-relationship diagrams based on your database.
 
-As part of our build process for running integration tests against the latest code in the "develop" branch, we drop the database on the "phoenix" server, recreate the database by deploying the latest war file, and run SchemaSpy to create the following site: http://phoenix.dataverse.org/schemaspy/latest/relationships.html
+We periodically run SchemaSpy and publish the output: https://guides.dataverse.org/en/6.2/schemaspy/index.html
 
-To run this command on your laptop, download SchemaSpy and take a look at the syntax in ``scripts/deploy/phoenix.dataverse.org/post``
-
-To read more about the phoenix server, see the :doc:`testing` section.
+To run SchemaSpy locally, take a look at the syntax in ``scripts/deploy/phoenix.dataverse.org/post``.
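+
+As a rough sketch (assuming the Docker credentials above and locally downloaded SchemaSpy and PostgreSQL JDBC jars; see that script for the exact invocation we use):
+
+.. code-block:: bash
+
+    java -jar schemaspy.jar -t pgsql -dp postgresql.jar \
+      -host localhost -port 5432 -db dataverse -u dataverse -p secret \
+      -o schemaspy-out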
 
 Deploying With ``asadmin``
 --------------------------
diff --git a/doc/sphinx-guides/source/developers/version-control.rst b/doc/sphinx-guides/source/developers/version-control.rst
index 8648c8ce2a0..127955a44ea 100644
--- a/doc/sphinx-guides/source/developers/version-control.rst
+++ b/doc/sphinx-guides/source/developers/version-control.rst
@@ -137,17 +137,29 @@ Make a Pull Request
 ~~~~~~~~~~~~~~~~~~~
 
 Make a pull request to get approval to merge your changes into the develop branch.
-If the pull request notes indicate that release notes are necessary, the workflow can then verify the existence of a corresponding file and respond with a 'thank you!' message. On the other hand, if no release notes are detected, the contributor can be gently reminded of their absence. Please see :doc:`making-releases` for guidance on writing release notes.
-Note that once a pull request is created, we'll remove the corresponding issue from our kanban board so that we're only tracking one card.
 
-Feedback on the pull request template we use is welcome! Here's an example of a pull request for issue #3827: https://github.com/IQSS/dataverse/pull/3827
+Feedback on the pull request template we use is welcome!
+
+Here's an example of a pull request for issue #9729: https://github.com/IQSS/dataverse/pull/10474
+
+Replace Issue with Pull Request
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+If the pull request closes an issue that has been prioritized, someone from the core team will do the following:
+
+- Move the open issue to the "Done" column of the `project board`_. We do this to track only one card, the pull request, on the project board. Merging the pull request will close the issue because we use the "closes #1234" `keyword <https://docs.github.com/en/issues/tracking-your-work-with-issues/linking-a-pull-request-to-an-issue>`_.
+- Copy all labels from the issue to the pull request with the exception of the "size" label.
+- Add a size label to the pull request that reflects the amount of review and QA time needed.
+- Move the pull request to the "Ready for Review" column.
+
+.. _project board: https://github.com/orgs/IQSS/projects/34
 
 Make Sure Your Pull Request Has Been Advanced to Code Review
 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
 
-Now that you've made your pull request, your goal is to make sure it appears in the "Code Review" column at https://github.com/orgs/IQSS/projects/34.
+Now that you've made your pull request, your goal is to make sure it appears in the "Code Review" column on the `project board`_.
 
-Look at https://github.com/IQSS/dataverse/blob/master/CONTRIBUTING.md for various ways to reach out to developers who have enough access to the GitHub repo to move your issue and pull request to the "Code Review" column.
+Look at :ref:`getting-help-developers` for various ways to reach out to developers who have enough access to the GitHub repo to move your issue and pull request to the "Code Review" column.
 
 Summary of Git commands
 ~~~~~~~~~~~~~~~~~~~~~~~
diff --git a/doc/sphinx-guides/source/installation/advanced.rst b/doc/sphinx-guides/source/installation/advanced.rst
index 3de5d0ea07c..bee289ecd5b 100644
--- a/doc/sphinx-guides/source/installation/advanced.rst
+++ b/doc/sphinx-guides/source/installation/advanced.rst
@@ -119,27 +119,29 @@ To activate in your Dataverse installation::
 
 .. _external-exporters:
 
-Installing External Metadata Exporters
-++++++++++++++++++++++++++++++++++++++
+External Metadata Exporters
++++++++++++++++++++++++++++
 
-As of Dataverse Software 5.14 Dataverse supports the use of external Exporters as a way to add additional metadata
-export formats to Dataverse or replace the built-in formats. This should be considered an **experimental** capability
-in that the mechanism is expected to evolve and using it may require additional effort when upgrading to new Dataverse
-versions.
+Dataverse 5.14+ supports the configuration of external metadata exporters (just "external exporters" or "exporters" for short) as a way to add additional metadata export formats or replace built-in formats. For a list of built-in formats, see :ref:`metadata-export-formats` in the User Guide.
 
-This capability is enabled by specifying a directory in which Dataverse should look for third-party Exporters. See
-:ref:`dataverse.spi.exporters.directory`.
+This should be considered an **experimental** capability in that the mechanism is expected to evolve and using it may require additional effort when upgrading to new Dataverse versions.
 
-See :doc:`/developers/metadataexport` for details about how to develop new Exporters.
+Enabling External Exporters
+^^^^^^^^^^^^^^^^^^^^^^^^^^^
 
-An minimal example Exporter is available at https://github.com/gdcc/dataverse-exporters. The community is encourage to 
-add additional exporters (and/or links to exporters elsewhere) in this repository. Once you have downloaded the 
-dataverse-spi-export-examples-1.0.0.jar (or other exporter jar), installed it in the directory specified above, and 
-restarted your Payara server, the new exporter should be available. 
+Use the :ref:`dataverse.spi.exporters.directory` configuration option to specify a directory from which external exporters (JAR files) should be loaded.
 
-The example dataverse-spi-export-examples-1.0.0.jar replaces the ``JSON`` export with a ``MyJSON in <locale>`` version
-that just wraps the existing JSON export object in a new JSON object with the key ``inputJson`` containing the original
-JSON.(Note that the ``MyJSON in <locale>`` label will appear in the dataset Metadata Export download menu immediately,
-but the content for already published datasets will only be updated after you delete the cached exports and/or use a
-reExport API call (see :ref:`batch-exports-through-the-api`).)
+.. _inventory-of-external-exporters:
 
+Inventory of External Exporters
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+For a list of external exporters, see the README at https://github.com/gdcc/dataverse-exporters. To highlight a few:
+
+- Croissant
+- RO-Crate
+
+Developing New Exporters
+^^^^^^^^^^^^^^^^^^^^^^^^
+
+See :doc:`/developers/metadataexport` for details about how to develop new exporters.
diff --git a/doc/sphinx-guides/source/installation/config.rst b/doc/sphinx-guides/source/installation/config.rst
index ba92a4180ae..ae3b07fecfe 100644
--- a/doc/sphinx-guides/source/installation/config.rst
+++ b/doc/sphinx-guides/source/installation/config.rst
@@ -1292,8 +1292,8 @@ Reported Working S3-Compatible Storage
  Note that for direct uploads and downloads, Dataverse redirects to the proxy-url but presigns the urls based on the ``dataverse.files.<id>.custom-endpoint-url``. Also, note that if you choose to enable ``dataverse.files.<id>.download-redirect`` the S3 URLs expire after 60 minutes by default. You can change that minute value to reflect a timeout value that’s more appropriate by using ``dataverse.files.<id>.url-expiration-minutes``.
 
 `Surf Object Store v2019-10-30 <https://www.surf.nl/en>`_
-  Set ``dataverse.files.<id>.payload-signing=true`` and ``dataverse.files.<id>.chunked-encoding=false`` to use Surf Object
-  Store.
+  Set ``dataverse.files.<id>.payload-signing=true``, ``dataverse.files.<id>.chunked-encoding=false``, and ``dataverse.files.<id>.path-style-request=true`` to use Surf Object
+  Store. You will need the Swift client (documented at http://doc.swift.surfsara.nl/en/latest/Pages/Clients/s3cred.html) to create the access key and secret key for the S3 interface.
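+
+  For example, with a hypothetical store id of ``surf``::
+
+    ./asadmin create-jvm-options '-Ddataverse.files.surf.payload-signing=true'
+    ./asadmin create-jvm-options '-Ddataverse.files.surf.chunked-encoding=false'
+    ./asadmin create-jvm-options '-Ddataverse.files.surf.path-style-request=true'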
 
 Note that the ``dataverse.files.<id>.proxy-url`` setting can be used in installations where the object store is proxied, but it should be considered an advanced option that will require significant expertise to properly configure. 
 For direct uploads and downloads, Dataverse redirects to the proxy-url but presigns the urls based on the ``dataverse.files.<id>.custom-endpoint-url``.
@@ -3190,12 +3190,19 @@ Can also be set via any `supported MicroProfile Config API source`_, e.g. the en
 dataverse.spi.exporters.directory
 +++++++++++++++++++++++++++++++++
 
-This JVM option is used to configure the file system path where external Exporter JARs can be placed. See :ref:`external-exporters` for more information.
+For some background, see :ref:`external-exporters` and :ref:`inventory-of-external-exporters`.
 
-``./asadmin create-jvm-options '-Ddataverse.spi.exporters.directory=PATH_LOCATION_HERE'``
+This JVM option is used to configure the file system path where external exporter JARs should be loaded from. For example:
 
-If this value is set, Dataverse will examine all JARs in the specified directory and will use them to add, or replace existing, metadata export formats.
-If this value is not set (the default), Dataverse will not use external Exporters.
+``./asadmin create-jvm-options '-Ddataverse.spi.exporters.directory=/var/lib/dataverse/exporters'``
+
+If this value is set, Dataverse will examine all JARs in the specified directory and will use them to add new metadata export formats or (if the machine-readable name used in :ref:`export-dataset-metadata-api` is the same) replace built-in metadata export formats.
+
+If this value is not set (the default), Dataverse will not load any external exporters.
+
+If you place a new JAR in this directory, you must restart Payara for Dataverse to load it.
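+
+For example (assuming the directory configured above and a Payara service managed by systemd; the JAR name is hypothetical)::
+
+  cp croissant-exporter-1.0.0.jar /var/lib/dataverse/exporters/
+  systemctl restart payara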
+
+If the JAR is for an exporter that replaces a built-in format, you must delete the cached exports and/or use a reExport API call (see :ref:`batch-exports-through-the-api`) for the new format to be visible for existing datasets.
 
 Can also be set via *MicroProfile Config API* sources, e.g. the environment variable ``DATAVERSE_SPI_EXPORTERS_DIRECTORY``.
 
diff --git a/doc/sphinx-guides/source/installation/prerequisites.rst b/doc/sphinx-guides/source/installation/prerequisites.rst
index 151d44e6841..f61321ef245 100644
--- a/doc/sphinx-guides/source/installation/prerequisites.rst
+++ b/doc/sphinx-guides/source/installation/prerequisites.rst
@@ -428,7 +428,7 @@ firewalled from your Dataverse installation host).
 Counter Processor
 -----------------
 
-Counter Processor is required to enable Make Data Count metrics in a Dataverse installation. See the :doc:`/admin/make-data-count` section of the Admin Guide for a description of this feature. Counter Processor is open source and we will be downloading it from https://github.com/CDLUC3/counter-processor
+Counter Processor is required to enable Make Data Count metrics in a Dataverse installation. See the :doc:`/admin/make-data-count` section of the Admin Guide for a description of this feature. Counter Processor is open source and we will be downloading it from https://github.com/gdcc/counter-processor
 
 Installing Counter Processor
 ============================
@@ -438,9 +438,9 @@ A scripted installation using Ansible is mentioned in the :doc:`/developers/make
 As root, download and install Counter Processor::
 
         cd /usr/local
-        wget https://github.com/CDLUC3/counter-processor/archive/v0.1.04.tar.gz
-        tar xvfz v0.1.04.tar.gz
-        cd /usr/local/counter-processor-0.1.04
+        wget https://github.com/gdcc/counter-processor/archive/refs/tags/v1.05.tar.gz
+        tar xvfz v1.05.tar.gz
+        cd /usr/local/counter-processor-1.05
 
 Installing GeoLite Country Database
 ===================================
@@ -451,7 +451,7 @@ The process required to sign up, download the database, and to configure automat
 
 As root, change to the Counter Processor directory you just created, download the GeoLite2-Country tarball from MaxMind, untar it, and copy the geoip database into place::
 
-        <download or move the GeoLite2-Country.tar.gz to the /usr/local/counter-processor-0.1.04 directory>
+        <download or move the GeoLite2-Country.tar.gz to the /usr/local/counter-processor-1.05 directory>
         tar xvfz GeoLite2-Country.tar.gz
         cp GeoLite2-Country_*/GeoLite2-Country.mmdb maxmind_geoip
 
@@ -461,12 +461,12 @@ Creating a counter User
 As root, create a "counter" user and change ownership of Counter Processor directory to this new user::
 
         useradd counter
-        chown -R counter:counter /usr/local/counter-processor-0.1.04
+        chown -R counter:counter /usr/local/counter-processor-1.05
 
 Installing Counter Processor Python Requirements
 ================================================
 
-Counter Processor version 0.1.04 requires Python 3.7 or higher. This version of Python is available in many operating systems, and is purportedly available for RHEL7 or CentOS 7 via Red Hat Software Collections. Alternately, one may compile it from source.
+Counter Processor version 1.05 requires Python 3.7 or higher. This version of Python is available in many operating systems, and is purportedly available for RHEL7 or CentOS 7 via Red Hat Software Collections. Alternately, one may compile it from source.
 
 The following commands are intended to be run as root but we are aware that Pythonistas might prefer fancy virtualenv or similar setups. Pull requests are welcome to improve these steps!
 
@@ -477,7 +477,7 @@ Install Python 3.9::
 Install Counter Processor Python requirements::
 
         python3.9 -m ensurepip
-        cd /usr/local/counter-processor-0.1.04
+        cd /usr/local/counter-processor-1.05
         pip3 install -r requirements.txt
 
 See the :doc:`/admin/make-data-count` section of the Admin Guide for how to configure and run Counter Processor.
diff --git a/doc/sphinx-guides/source/user/dataset-management.rst b/doc/sphinx-guides/source/user/dataset-management.rst
index a1e214589e3..6852b60575b 100755
--- a/doc/sphinx-guides/source/user/dataset-management.rst
+++ b/doc/sphinx-guides/source/user/dataset-management.rst
@@ -25,7 +25,7 @@ For more details about what Citation and Domain Specific Metadata is supported p
 Supported Metadata Export Formats
 ---------------------------------
 
-Once a dataset has been published, its metadata can be exported in a variety of other metadata standards and formats, which help make datasets more discoverable and usable in other systems, such as other data repositories. On each dataset page's metadata tab, the following exports are available:
+Once a dataset has been published, its metadata can be exported in a variety of other metadata standards and formats, which help make datasets more :doc:`discoverable </admin/discoverability>` and usable in other systems, such as other data repositories. On each dataset page's metadata tab, the following exports are available:
 
 - Dublin Core
 - DDI (Data Documentation Initiative Codebook 2.5)
@@ -36,6 +36,11 @@ Once a dataset has been published, its metadata can be exported in a variety of
 - OpenAIRE
 - Schema.org JSON-LD
 
+Additional formats can be enabled. See :ref:`inventory-of-external-exporters` in the Installation Guide. To highlight a few:
+
+- Croissant
+- RO-Crate
+
 Each of these metadata exports contains the metadata of the most recently published version of the dataset.
 
 .. _adding-new-dataset:
diff --git a/docker/compose/demo/compose.yml b/docker/compose/demo/compose.yml
index e6ffc9f392a..33e7b52004b 100644
--- a/docker/compose/demo/compose.yml
+++ b/docker/compose/demo/compose.yml
@@ -9,7 +9,7 @@ services:
     restart: on-failure
     user: payara
     environment:
-      _CT_DATAVERSE_SITEURL: "https://demo.example.org"
+      DATAVERSE_SITEURL: "https://demo.example.org"
       DATAVERSE_DB_HOST: postgres
       DATAVERSE_DB_PASSWORD: secret
       DATAVERSE_DB_USER: dataverse
diff --git a/modules/container-base/src/main/docker/Dockerfile b/modules/container-base/src/main/docker/Dockerfile
index 93f9fa4f0c1..29078e6896c 100644
--- a/modules/container-base/src/main/docker/Dockerfile
+++ b/modules/container-base/src/main/docker/Dockerfile
@@ -41,11 +41,18 @@ ENV PAYARA_DIR="${HOME_DIR}/appserver" \
     STORAGE_DIR="/dv" \
     SECRETS_DIR="/secrets" \
     DUMPS_DIR="/dumps" \
-    PASSWORD_FILE="${HOME_DIR}/passwordFile" \
-    ADMIN_USER="admin" \
-    ADMIN_PASSWORD="admin" \
+    PAYARA_ADMIN_USER="admin" \
+    # This is a public default, easy to change via this env var at runtime
+    PAYARA_ADMIN_PASSWORD="admin" \
     DOMAIN_NAME="domain1" \
-    PAYARA_ARGS=""
+    # This is the public default as per https://docs.payara.fish/community/docs/Technical%20Documentation/Payara%20Server%20Documentation/Security%20Guide/Administering%20System%20Security.html#to-change-the-master-password
+    # Can be changed at runtime via this env var
+    DOMAIN_PASSWORD="changeit" \
+    PAYARA_ARGS="" \
+    LINUX_USER="payara" \
+    LINUX_GROUP="payara" \
+    # This is a public default and can be changed at runtime using this env var
+    LINUX_PASSWORD="payara"
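+# The passwords above are public defaults; you can override them at runtime, e.g.:
+#   docker run -e PAYARA_ADMIN_PASSWORD=... -e DOMAIN_PASSWORD=... -e LINUX_PASSWORD=... <image>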
 ENV PATH="${PATH}:${PAYARA_DIR}/bin:${SCRIPT_DIR}" \
     DOMAIN_DIR="${PAYARA_DIR}/glassfish/domains/${DOMAIN_NAME}" \
     DEPLOY_PROPS="" \
@@ -69,6 +76,10 @@ ENV PATH="${PATH}:${PAYARA_DIR}/bin:${SCRIPT_DIR}" \
 ### PART 1: SYSTEM ###
 ARG UID=1000
 ARG GID=1000
+# Auto-populated by BuildKit / buildx
+#ARG TARGETARCH="amd64"
+ARG TARGETARCH
+
 USER root
 WORKDIR /
 SHELL ["/bin/bash", "-euo", "pipefail", "-c"]
@@ -78,23 +89,25 @@ RUN <<EOF
     # Create pathes
     mkdir -p "${HOME_DIR}" "${PAYARA_DIR}" "${DEPLOY_DIR}" "${CONFIG_DIR}" "${SCRIPT_DIR}"
     mkdir -p "${STORAGE_DIR}" "${SECRETS_DIR}" "${DUMPS_DIR}"
+    # Remove the default "ubuntu" user if present (introduced in Ubuntu 24.04); do not fail the build if it is absent
+    userdel --force --remove ubuntu || true
+    groupdel -f ubuntu || true # for some reason, groupdel on Ubuntu 22.04 does not like --force
     # Create user
-    addgroup --gid ${GID} payara
-    adduser --system --uid ${UID} --no-create-home --shell /bin/bash --home "${HOME_DIR}" --gecos "" --ingroup payara payara
-    echo payara:payara | chpasswd
+    groupadd --gid "${GID}" "${LINUX_GROUP}"
+    useradd --system --uid "${UID}" --no-create-home --shell /bin/false --home "${HOME_DIR}" --gid "${LINUX_GROUP}" "${LINUX_USER}"
+    echo "${LINUX_USER}:$LINUX_PASSWORD" | chpasswd
     # Set permissions
     # Note: Following OpenShift best practices for arbitrary user id support:
     #       https://docs.openshift.com/container-platform/4.14/openshift_images/create-images.html#use-uid_create-images
-    chown -R payara:0 "${HOME_DIR}" "${STORAGE_DIR}" "${SECRETS_DIR}" "${DUMPS_DIR}"
+    chown -R "${LINUX_USER}:0" "${HOME_DIR}" "${STORAGE_DIR}" "${SECRETS_DIR}" "${DUMPS_DIR}"
     chmod -R g=u "${HOME_DIR}" "${STORAGE_DIR}" "${SECRETS_DIR}" "${DUMPS_DIR}"
-
 EOF
 
-ARG JATTACH_VERSION="v2.1"
-ARG JATTACH_CHECKSUM="07885fdc782e02e7302c6d190f54c3930afa10a38140365adf54076ec1086a8e"
-ARG WAIT_FOR_VERSION="v2.2.3"
-ARG WAIT_FOR_CHECKSUM="70271181be69cd2c7265b2746f97fccfd7e8aa1059894138a775369c23589ff4"
-ARG PKGS="jq imagemagick curl unzip wget acl dirmngr gpg lsof procps netcat dumb-init"
+ARG JATTACH_VERSION="v2.2"
+ARG JATTACH_TGZ_CHECKSUM_AMD64="acd9e17f15749306be843df392063893e97bfecc5260eef73ee98f06e5cfe02f"
+ARG JATTACH_TGZ_CHECKSUM_ARM64="288ae5ed87ee7fe0e608c06db5a23a096a6217c9878ede53c4e33710bdcaab51"
+ARG WAIT4X_VERSION="v2.14.0"
+ARG PKGS="jq imagemagick curl unzip wget acl lsof procps netcat-openbsd dumb-init"
 
 # Installing the packages in an extra container layer for better caching
 RUN <<EOF
@@ -103,30 +116,33 @@ RUN <<EOF
     apt-get install -qqy --no-install-recommends ${PKGS}
     rm -rf "/var/lib/apt/lists/*"
 
-    # Install jattach
-    curl -sSfL -o /usr/bin/jattach "https://github.com/apangin/jattach/releases/download/${JATTACH_VERSION}/jattach"
-    echo "${JATTACH_CHECKSUM} /usr/bin/jattach" | sha256sum -c -
-    chmod +x /usr/bin/jattach
+    # Install jattach & wait4x
+    if [ "${TARGETARCH}" = "amd64" ]; then
+      curl -sSfL -o /usr/bin/jattach.tgz "https://github.com/jattach/jattach/releases/download/${JATTACH_VERSION}/jattach-linux-x64.tgz"
+      echo "${JATTACH_TGZ_CHECKSUM_AMD64} /usr/bin/jattach.tgz" | sha256sum -c -
+    elif [ "${TARGETARCH}" = "arm64" ]; then
+      curl -sSfL -o /usr/bin/jattach.tgz "https://github.com/jattach/jattach/releases/download/${JATTACH_VERSION}/jattach-linux-arm64.tgz"
+      echo "${JATTACH_TGZ_CHECKSUM_ARM64} /usr/bin/jattach.tgz" | sha256sum -c -
+    fi
+    tar -xzf /usr/bin/jattach.tgz -C /usr/bin && chmod +x /usr/bin/jattach
 
-    # Install wait-for
-    curl -sSfL -o /usr/bin/wait-for "https://github.com/eficode/wait-for/releases/download/${WAIT_FOR_VERSION}/wait-for"
-    echo "${WAIT_FOR_CHECKSUM} /usr/bin/wait-for" | sha256sum -c -
-    chmod +x /usr/bin/wait-for
+    # Install wait4x
+    curl -sSfL -o /usr/bin/wait4x.tar.gz "https://github.com/atkrad/wait4x/releases/download/${WAIT4X_VERSION}/wait4x-linux-${TARGETARCH}.tar.gz"
+    curl -sSfL -o /tmp/w4x-checksum "https://github.com/atkrad/wait4x/releases/download/${WAIT4X_VERSION}/wait4x-linux-${TARGETARCH}.tar.gz.sha256sum"
+    echo "$(cat /tmp/w4x-checksum | cut -f1 -d" ") /usr/bin/wait4x.tar.gz" | sha256sum -c -
+    tar -xzf /usr/bin/wait4x.tar.gz -C /usr/bin && chmod +x /usr/bin/wait4x
 EOF
 
 ### PART 2: PAYARA ###
 # After setting up system, now configure Payara
-
-ARG ASADMIN="${PAYARA_DIR}/bin/asadmin --user=${ADMIN_USER} --passwordfile=${PASSWORD_FILE}"
-
-USER payara
+USER ${LINUX_USER}
 WORKDIR ${HOME_DIR}
 
 # Copy Payara from build context (cached by Maven)
-COPY --chown=payara:payara maven/appserver ${PAYARA_DIR}/
+COPY --chown=${LINUX_USER}:${LINUX_GROUP} maven/appserver ${PAYARA_DIR}/
 
 # Copy the system (appserver level) scripts like entrypoint, etc
-COPY --chown=payara:payara maven/scripts ${SCRIPT_DIR}/
+COPY --chown=${LINUX_USER}:${LINUX_GROUP} maven/scripts ${SCRIPT_DIR}/
 
 # Configure the domain to be container and production ready
 # -- This is mostly inherited from the "production domain template", experience with Dataverse and
@@ -134,9 +150,14 @@ COPY --chown=payara:payara maven/scripts ${SCRIPT_DIR}/
 RUN <<EOF
     # Set admin password
     echo "AS_ADMIN_PASSWORD=" > /tmp/password-change-file.txt
-    echo "AS_ADMIN_NEWPASSWORD=${ADMIN_PASSWORD}" >> /tmp/password-change-file.txt
-    echo "AS_ADMIN_PASSWORD=${ADMIN_PASSWORD}" >> ${PASSWORD_FILE}
-    asadmin --user=${ADMIN_USER} --passwordfile=/tmp/password-change-file.txt change-admin-password --domain_name=${DOMAIN_NAME}
+    echo "AS_ADMIN_NEWPASSWORD=${PAYARA_ADMIN_PASSWORD}" >> /tmp/password-change-file.txt
+    asadmin --user=${PAYARA_ADMIN_USER} --passwordfile=/tmp/password-change-file.txt change-admin-password --domain_name=${DOMAIN_NAME}
+
+    # Prepare shorthand
+    PASSWORD_FILE=$(mktemp)
+    echo "AS_ADMIN_PASSWORD=${PAYARA_ADMIN_PASSWORD}" >> ${PASSWORD_FILE}
+    ASADMIN="${PAYARA_DIR}/bin/asadmin --user=${PAYARA_ADMIN_USER} --passwordfile=${PASSWORD_FILE}"
+
     # Start domain for configuration
     ${ASADMIN} start-domain ${DOMAIN_NAME}
     # Allow access to admin with password only
@@ -213,6 +234,7 @@ RUN <<EOF
     ${SCRIPT_DIR}/removeExpiredCaCerts.sh
     # Delete generated files
     rm -rf \
+        "$PASSWORD_FILE" \
         "/tmp/password-change-file.txt" \
         "${PAYARA_DIR}/glassfish/domains/${DOMAIN_NAME}/osgi-cache" \
         "${PAYARA_DIR}/glassfish/domains/${DOMAIN_NAME}/logs"
@@ -224,6 +246,7 @@ USER root
 RUN true && \
     chgrp -R 0 "${DOMAIN_DIR}" && \
     chmod -R g=u "${DOMAIN_DIR}"
+USER ${LINUX_USER}
 
 # Set the entrypoint to tini (as a process supervisor)
 ENTRYPOINT ["/usr/bin/dumb-init", "--"]
diff --git a/modules/container-base/src/main/docker/scripts/entrypoint.sh b/modules/container-base/src/main/docker/scripts/entrypoint.sh
index bd7031db9f0..ed3b8ea9aa4 100644
--- a/modules/container-base/src/main/docker/scripts/entrypoint.sh
+++ b/modules/container-base/src/main/docker/scripts/entrypoint.sh
@@ -12,10 +12,14 @@
 
 # We do not define these variables within our Dockerfile so the location can be changed when trying to avoid
 # writes to the overlay filesystem. (CONFIG_DIR is defined within the Dockerfile, but might be overridden.)
-${PREBOOT_COMMANDS:="${CONFIG_DIR}/pre-boot-commands.asadmin"}
-export PREBOOT_COMMANDS
-${POSTBOOT_COMMANDS:="${CONFIG_DIR}/post-boot-commands.asadmin"}
-export POSTBOOT_COMMANDS
+PREBOOT_COMMANDS_FILE=${PREBOOT_COMMANDS:-"${CONFIG_DIR}/pre-boot-commands.asadmin"}
+export PREBOOT_COMMANDS_FILE
+POSTBOOT_COMMANDS_FILE=${POSTBOOT_COMMANDS:-"${CONFIG_DIR}/post-boot-commands.asadmin"}
+export POSTBOOT_COMMANDS_FILE
+
+# Remove existing POSTBOOT/PREBOOT files if they exist. Anything to be done needs to be injected by a script
+rm -rf "$POSTBOOT_COMMANDS_FILE" || exit 1
+rm -rf "$PREBOOT_COMMANDS_FILE" || exit 1
 
 # Execute any scripts BEFORE the appserver starts
 for f in "${SCRIPT_DIR}"/init_* "${SCRIPT_DIR}"/init.d/*; do
diff --git a/modules/container-base/src/main/docker/scripts/init_1_change_passwords.sh b/modules/container-base/src/main/docker/scripts/init_1_change_passwords.sh
new file mode 100644
index 00000000000..0bf9d0b80fb
--- /dev/null
+++ b/modules/container-base/src/main/docker/scripts/init_1_change_passwords.sh
@@ -0,0 +1,43 @@
+#!/bin/bash
+set -euo pipefail
+
+# NOTE: ALL PASSWORD ENV VARS WILL BE SCRAMBLED IN startInForeground.sh FOR SECURITY!
+#       This is to avoid possible attack vectors where someone could extract the sensitive information
+#       from within an env var dump inside an application!
+
+# If someone set the env var for the password, apply the new password. Otherwise, print a warning.
+# https://docs.openshift.com/container-platform/4.14/openshift_images/create-images.html#avoid-default-passwords
+if [ "$LINUX_PASSWORD" != "payara" ]; then
+  echo -e "$LINUX_USER\n$LINUX_PASSWORD\n$LINUX_PASSWORD" | passwd
+else
+  echo "IMPORTANT: THIS CONTAINER USES THE DEFAULT PASSWORD FOR USER \"${LINUX_USER}\"! ('payara')"
+  echo "           To change the password, set the LINUX_PASSWORD env var."
+fi
+
+# Change the domain admin password if necessary
+if [ "$PAYARA_ADMIN_PASSWORD" != "admin" ]; then
+  PASSWORD_FILE=$(mktemp)
+  echo "AS_ADMIN_PASSWORD=admin" > "$PASSWORD_FILE"
+  echo "AS_ADMIN_NEWPASSWORD=${PAYARA_ADMIN_PASSWORD}" >> "$PASSWORD_FILE"
+  asadmin --user="${PAYARA_ADMIN_USER}" --passwordfile="$PASSWORD_FILE" change-admin-password --domain_name="${DOMAIN_NAME}"
+  rm "$PASSWORD_FILE"
+else
+  echo "IMPORTANT: THIS CONTAINER USES THE DEFAULT PASSWORD FOR PAYARA ADMIN \"${PAYARA_ADMIN_USER}\"! ('admin')"
+  echo "           To change the password, set the PAYARA_ADMIN_PASSWORD env var."
+fi
+
+# Change the domain master password if necessary
+# > The master password is not tied to a user account, and it is not used for authentication.
+# > Instead, Payara Server strictly uses the master password to ONLY encrypt the keystore and truststore used to store keys and certificates for the DAS and instances usage.
+# It will be requested when booting the application server!
+# https://docs.payara.fish/community/docs/Technical%20Documentation/Payara%20Server%20Documentation/Security%20Guide/Administering%20System%20Security.html#to-change-the-master-password
+if [ "$DOMAIN_PASSWORD" != "changeit" ]; then
+  PASSWORD_FILE=$(mktemp)
+  echo "AS_ADMIN_MASTERPASSWORD=changeit" >> "$PASSWORD_FILE"
+  echo "AS_ADMIN_NEWMASTERPASSWORD=${DOMAIN_PASSWORD}" >> "$PASSWORD_FILE"
+  asadmin --user="${PAYARA_ADMIN_USER}" --passwordfile="$PASSWORD_FILE" change-master-password --savemasterpassword false "${DOMAIN_NAME}"
+  rm "$PASSWORD_FILE"
+else
+  echo "IMPORTANT: THIS CONTAINER USES THE DEFAULT DOMAIN \"MASTER\" PASSWORD! ('changeit')"
+  echo "           To change the password, set the DOMAIN_PASSWORD env var."
+fi
diff --git a/modules/container-base/src/main/docker/scripts/init_1_generate_deploy_commands.sh b/modules/container-base/src/main/docker/scripts/init_1_generate_deploy_commands.sh
index 161f10caebf..622ea82d6f6 100644
--- a/modules/container-base/src/main/docker/scripts/init_1_generate_deploy_commands.sh
+++ b/modules/container-base/src/main/docker/scripts/init_1_generate_deploy_commands.sh
@@ -35,12 +35,11 @@ set -euo pipefail
 
 # Check required variables are set
 if [ -z "$DEPLOY_DIR" ]; then echo "Variable DEPLOY_DIR is not set."; exit 1; fi
-if [ -z "$PREBOOT_COMMANDS" ]; then echo "Variable PREBOOT_COMMANDS is not set."; exit 1; fi
-if [ -z "$POSTBOOT_COMMANDS" ]; then echo "Variable POSTBOOT_COMMANDS is not set."; exit 1; fi
-
-# Create pre and post boot command files if they don't exist
-touch "$POSTBOOT_COMMANDS"
-touch "$PREBOOT_COMMANDS"
+if [ -z "$PREBOOT_COMMANDS_FILE" ]; then echo "Variable PREBOOT_COMMANDS_FILE is not set."; exit 1; fi
+if [ -z "$POSTBOOT_COMMANDS_FILE" ]; then echo "Variable POSTBOOT_COMMANDS_FILE is not set."; exit 1; fi
+# Test if files are writeable for us, exit otherwise
+touch "$PREBOOT_COMMANDS_FILE" || exit 1
+touch "$POSTBOOT_COMMANDS_FILE" || exit 1
 
 deploy() {
 
@@ -50,14 +49,14 @@ deploy() {
   fi
 
   DEPLOY_STATEMENT="deploy $DEPLOY_PROPS $1"
-  if grep -q "$1" "$POSTBOOT_COMMANDS"; then
-    echo "post boot commands already deploys $1";
+  if grep -q "$1" "$POSTBOOT_COMMANDS_FILE"; then
+    echo "Post boot commands already deploys $1, skip adding";
   else
     if [ -n "$SKIP_DEPLOY" ] && { [ "$SKIP_DEPLOY" = "1" ] || [ "$SKIP_DEPLOY" = "true" ]; }; then
       echo "Skipping deployment of $1 as requested.";
     else
       echo "Adding deployment target $1 to post boot commands";
-      echo "$DEPLOY_STATEMENT" >> "$POSTBOOT_COMMANDS";
+      echo "$DEPLOY_STATEMENT" >> "$POSTBOOT_COMMANDS_FILE";
     fi
   fi
 }
diff --git a/modules/container-base/src/main/docker/scripts/init_1_generate_devmode_commands.sh b/modules/container-base/src/main/docker/scripts/init_1_generate_devmode_commands.sh
index 016151168d5..608113d1cf7 100644
--- a/modules/container-base/src/main/docker/scripts/init_1_generate_devmode_commands.sh
+++ b/modules/container-base/src/main/docker/scripts/init_1_generate_devmode_commands.sh
@@ -11,39 +11,49 @@ set -euo pipefail
 # for the parent shell before executing Payara.
 ###### ###### ###### ###### ###### ###### ###### ###### ###### ###### ######
 
+if [ -z "$PREBOOT_COMMANDS_FILE" ]; then echo "Variable PREBOOT_COMMANDS_FILE is not set."; exit 1; fi
+# Test if preboot file is writeable for us, exit otherwise
+touch "$PREBOOT_COMMANDS_FILE" || exit 1
+
 # 0. Init variables
 ENABLE_JMX=${ENABLE_JMX:-0}
 ENABLE_JDWP=${ENABLE_JDWP:-0}
 ENABLE_RELOAD=${ENABLE_RELOAD:-0}
 
-DV_PREBOOT=${CONFIG_DIR}/dataverse_preboot
-echo "# Dataverse preboot configuration for Payara" > "${DV_PREBOOT}"
+function inject() {
+  if [ -z "$1" ]; then echo "No line specified"; exit 1; fi
+  # If the line is not yet in the file, try to add it
+  if ! grep -q "$1" "$PREBOOT_COMMANDS_FILE"; then
+    # Check if the line is still not in the file when splitting at the first =
+    if ! grep -q "$(echo "$1" | cut -f1 -d"=")" "$PREBOOT_COMMANDS_FILE"; then
+      echo "$1" >> "$PREBOOT_COMMANDS_FILE"
+    fi
+  fi
+}
 
 # 1. Configure JMX (enabled by default on port 8686, but requires SSL)
 # See also https://blog.payara.fish/monitoring-payara-server-with-jconsole
 # To still use it, you can use a sidecar container proxying or using JMX via localhost without SSL.
 if [ "${ENABLE_JMX}" = "1" ]; then
   echo "Enabling unsecured JMX on 0.0.0.0:8686, enabling AMX and tuning monitoring levels to HIGH. You'll need a sidecar for this, as access is allowed from same machine only (without SSL)."
-  { \
-    echo "set configs.config.server-config.amx-configuration.enabled=true"
-    echo "set configs.config.server-config.monitoring-service.module-monitoring-levels.jvm=HIGH"
-    echo "set configs.config.server-config.monitoring-service.module-monitoring-levels.connector-service=HIGH"
-    echo "set configs.config.server-config.monitoring-service.module-monitoring-levels.connector-connection-pool=HIGH"
-    echo "set configs.config.server-config.monitoring-service.module-monitoring-levels.jdbc-connection-pool=HIGH"
-    echo "set configs.config.server-config.monitoring-service.module-monitoring-levels.web-services-container=HIGH"
-    echo "set configs.config.server-config.monitoring-service.module-monitoring-levels.ejb-container=HIGH"
-    echo "set configs.config.server-config.monitoring-service.module-monitoring-levels.thread-pool=HIGH"
-    echo "set configs.config.server-config.monitoring-service.module-monitoring-levels.http-service=HIGH"
-    echo "set configs.config.server-config.monitoring-service.module-monitoring-levels.security=HIGH"
-    echo "set configs.config.server-config.monitoring-service.module-monitoring-levels.jms-service=HIGH"
-    echo "set configs.config.server-config.monitoring-service.module-monitoring-levels.jersey=HIGH"
-    echo "set configs.config.server-config.monitoring-service.module-monitoring-levels.transaction-service=HIGH"
-    echo "set configs.config.server-config.monitoring-service.module-monitoring-levels.jpa=HIGH"
-    echo "set configs.config.server-config.monitoring-service.module-monitoring-levels.web-container=HIGH"
-    echo "set configs.config.server-config.monitoring-service.module-monitoring-levels.orb=HIGH"
-    echo "set configs.config.server-config.monitoring-service.module-monitoring-levels.deployment=HIGH"
-    echo "set configs.config.server-config.admin-service.jmx-connector.system.security-enabled=false"
-  } >> "${DV_PREBOOT}"
+  inject "set configs.config.server-config.amx-configuration.enabled=true"
+  inject "set configs.config.server-config.monitoring-service.module-monitoring-levels.jvm=HIGH"
+  inject "set configs.config.server-config.monitoring-service.module-monitoring-levels.connector-service=HIGH"
+  inject "set configs.config.server-config.monitoring-service.module-monitoring-levels.connector-connection-pool=HIGH"
+  inject "set configs.config.server-config.monitoring-service.module-monitoring-levels.jdbc-connection-pool=HIGH"
+  inject "set configs.config.server-config.monitoring-service.module-monitoring-levels.web-services-container=HIGH"
+  inject "set configs.config.server-config.monitoring-service.module-monitoring-levels.ejb-container=HIGH"
+  inject "set configs.config.server-config.monitoring-service.module-monitoring-levels.thread-pool=HIGH"
+  inject "set configs.config.server-config.monitoring-service.module-monitoring-levels.http-service=HIGH"
+  inject "set configs.config.server-config.monitoring-service.module-monitoring-levels.security=HIGH"
+  inject "set configs.config.server-config.monitoring-service.module-monitoring-levels.jms-service=HIGH"
+  inject "set configs.config.server-config.monitoring-service.module-monitoring-levels.jersey=HIGH"
+  inject "set configs.config.server-config.monitoring-service.module-monitoring-levels.transaction-service=HIGH"
+  inject "set configs.config.server-config.monitoring-service.module-monitoring-levels.jpa=HIGH"
+  inject "set configs.config.server-config.monitoring-service.module-monitoring-levels.web-container=HIGH"
+  inject "set configs.config.server-config.monitoring-service.module-monitoring-levels.orb=HIGH"
+  inject "set configs.config.server-config.monitoring-service.module-monitoring-levels.deployment=HIGH"
+  inject "set configs.config.server-config.admin-service.jmx-connector.system.security-enabled=false"
 fi
 
 # 2. Enable JDWP via debugging switch
@@ -55,17 +65,12 @@ fi
 # 3. Enable hot reload
 if [ "${ENABLE_RELOAD}" = "1" ]; then
   echo "Enabling hot reload of deployments."
-  echo "set configs.config.server-config.admin-service.das-config.dynamic-reload-enabled=true" >> "${DV_PREBOOT}"
-  echo "set configs.config.server-config.admin-service.das-config.autodeploy-enabled=true" >> "${DV_PREBOOT}"
-  export DATAVERSE_JSF_PROJECT_STAGE=${DATAVERSE_JSF_PROJECT_STAGE:-"Development"}
-  export DATAVERSE_JSF_REFRESH_PERIOD=${DATAVERSE_JSF_REFRESH_PERIOD:-"0"}
+  inject "set configs.config.server-config.admin-service.das-config.dynamic-reload-enabled=true"
+  inject "set configs.config.server-config.admin-service.das-config.autodeploy-enabled=true"
 fi
 
 # 4. Add the commands to the existing preboot file, but insert BEFORE deployment
-TMP_PREBOOT=$(mktemp)
-cat "${DV_PREBOOT}" "${PREBOOT_COMMANDS}" > "${TMP_PREBOOT}"
-mv "${TMP_PREBOOT}" "${PREBOOT_COMMANDS}"
-echo "DEBUG: preboot contains the following commands:"
+echo "DEBUG: preboot contains now the following commands:"
+echo "--------------------------------------------------"
+cat "${PREBOOT_COMMANDS_FILE}"
 echo "--------------------------------------------------"
-cat "${PREBOOT_COMMANDS}"
-echo "--------------------------------------------------"
\ No newline at end of file
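
The inject() helper introduced above makes preboot configuration idempotent: a line is appended only if neither the exact line nor another line with the same key (the text before the first "=") is already present. A standalone sketch of that dedup behavior, using a throwaway file:

```bash
#!/usr/bin/env bash
# Standalone sketch of the inject() dedup pattern shown above; the target is a scratch tempfile.
PREBOOT_COMMANDS_FILE=$(mktemp)

inject() {
  if [ -z "$1" ]; then echo "No line specified"; exit 1; fi
  # Skip if the exact line is already present ...
  if ! grep -q "$1" "$PREBOOT_COMMANDS_FILE"; then
    # ... or if some line already uses the same key (text before the first '=').
    if ! grep -q "$(echo "$1" | cut -f1 -d"=")" "$PREBOOT_COMMANDS_FILE"; then
      echo "$1" >> "$PREBOOT_COMMANDS_FILE"
    fi
  fi
}

inject "set configs.config.server-config.admin-service.das-config.autodeploy-enabled=true"
inject "set configs.config.server-config.admin-service.das-config.autodeploy-enabled=false" # ignored: same key
cat "$PREBOOT_COMMANDS_FILE"  # prints only the first line
```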
diff --git a/modules/container-base/src/main/docker/scripts/startInForeground.sh b/modules/container-base/src/main/docker/scripts/startInForeground.sh
index 4843f6ae055..fa7d533b0d1 100644
--- a/modules/container-base/src/main/docker/scripts/startInForeground.sh
+++ b/modules/container-base/src/main/docker/scripts/startInForeground.sh
@@ -32,10 +32,11 @@
 ##########################################################################################################
 
 # Check required variables are set
-if [ -z "$ADMIN_USER" ]; then echo "Variable ADMIN_USER is not set."; exit 1; fi
-if [ -z "$PASSWORD_FILE" ]; then echo "Variable PASSWORD_FILE is not set."; exit 1; fi
-if [ -z "$PREBOOT_COMMANDS" ]; then echo "Variable PREBOOT_COMMANDS is not set."; exit 1; fi
-if [ -z "$POSTBOOT_COMMANDS" ]; then echo "Variable POSTBOOT_COMMANDS is not set."; exit 1; fi
+if [ -z "$PAYARA_ADMIN_USER" ]; then echo "Variable ADMIN_USER is not set."; exit 1; fi
+if [ -z "$PAYARA_ADMIN_PASSWORD" ]; then echo "Variable ADMIN_PASSWORD is not set."; exit 1; fi
+if [ -z "$DOMAIN_PASSWORD" ]; then echo "Variable DOMAIN_PASSWORD is not set."; exit 1; fi
+if [ -z "$PREBOOT_COMMANDS_FILE" ]; then echo "Variable PREBOOT_COMMANDS_FILE is not set."; exit 1; fi
+if [ -z "$POSTBOOT_COMMANDS_FILE" ]; then echo "Variable POSTBOOT_COMMANDS_FILE is not set."; exit 1; fi
 if [ -z "$DOMAIN_NAME" ]; then echo "Variable DOMAIN_NAME is not set."; exit 1; fi
 
 # Check if dumps are enabled - add arg to JVM_ARGS in this case
@@ -43,6 +44,13 @@ if [ -n "${ENABLE_DUMPS}" ] && [ "${ENABLE_DUMPS}" = "1" ]; then
   JVM_ARGS="${JVM_DUMPS_ARG} ${JVM_ARGS}"
 fi
 
+# For safety reasons, no longer expose the passwords - malicious code could extract them!
+# (We need to save the master password for booting the server though)
+MASTER_PASSWORD="${DOMAIN_PASSWORD}"
+export LINUX_PASSWORD="have-some-scrambled-eggs"
+export PAYARA_ADMIN_PASSWORD="have-some-scrambled-eggs"
+export DOMAIN_PASSWORD="have-some-scrambled-eggs"
+
 # The following command gets the command line to be executed by start-domain
 # - print the command line to the server with --dry-run, each argument on a separate line
 # - remove -read-string argument
@@ -50,19 +58,25 @@ fi
 # - remove lines before and after the command line and squash commands on a single line
 
 # Create pre and post boot command files if they don't exist
-touch "$POSTBOOT_COMMANDS"
-touch "$PREBOOT_COMMANDS"
+touch "$POSTBOOT_COMMANDS_FILE" || exit 1
+touch "$PREBOOT_COMMANDS_FILE" || exit 1
 
+# This workaround is necessary due to limitations of asadmin
+PASSWORD_FILE=$(mktemp)
+echo "AS_ADMIN_MASTERPASSWORD=$MASTER_PASSWORD" > "$PASSWORD_FILE"
 # shellcheck disable=SC2068
 #   -- Using $@ is necessary here as asadmin cannot deal with options enclosed in ""!
-OUTPUT=$("${PAYARA_DIR}"/bin/asadmin --user="${ADMIN_USER}" --passwordfile="${PASSWORD_FILE}" start-domain --dry-run --prebootcommandfile="${PREBOOT_COMMANDS}" --postbootcommandfile="${POSTBOOT_COMMANDS}" $@ "$DOMAIN_NAME")
+OUTPUT=$("${PAYARA_DIR}"/bin/asadmin --user="${PAYARA_ADMIN_USER}" --passwordfile="$PASSWORD_FILE" start-domain --dry-run --prebootcommandfile="${PREBOOT_COMMANDS_FILE}" --postbootcommandfile="${POSTBOOT_COMMANDS_FILE}" $@ "$DOMAIN_NAME")
 STATUS=$?
+rm "$PASSWORD_FILE"
 if [ "$STATUS" -ne 0 ]
   then
     echo ERROR: "$OUTPUT" >&2
     exit 1
 fi
 
+echo "Booting now..."
+
 COMMAND=$(echo "$OUTPUT"\
  | sed -n -e '2,/^$/p'\
  | sed "s|glassfish.jar|glassfish.jar $JVM_ARGS |g")
@@ -72,18 +86,6 @@ echo "$COMMAND" | tr ' ' '\n'
 echo
 
 # Run the server in foreground - read master password from variable or file or use the default "changeit" password
-
-set +x
-if test "$AS_ADMIN_MASTERPASSWORD"x = x -a -f "$PASSWORD_FILE"
-  then
-    # shellcheck disable=SC1090
-    source "$PASSWORD_FILE"
-fi
-if test "$AS_ADMIN_MASTERPASSWORD"x = x
-  then
-    AS_ADMIN_MASTERPASSWORD=changeit
-fi
-echo "AS_ADMIN_MASTERPASSWORD=$AS_ADMIN_MASTERPASSWORD" > /tmp/masterpwdfile
 # shellcheck disable=SC2086
 #   -- Unquoted exec var is necessary, as otherwise things get escaped that may not be escaped (parameters for Java)
-exec ${COMMAND} < /tmp/masterpwdfile
+exec ${COMMAND} < <(echo "AS_ADMIN_MASTERPASSWORD=$MASTER_PASSWORD")
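
The last hunk above replaces the temporary /tmp/masterpwdfile with a process substitution, so the master password is handed to the server on stdin without ever touching disk, after the exported copies are scrambled. A reduced sketch of that pattern, with cat standing in for the real Payara start command:

```bash
#!/usr/bin/env bash
# Sketch of the secret-handling pattern above; cat stands in for the server command.
set -euo pipefail

DOMAIN_PASSWORD="${DOMAIN_PASSWORD:-changeit}"
MASTER_PASSWORD="${DOMAIN_PASSWORD}"              # keep a private copy for the boot handshake
export DOMAIN_PASSWORD="have-some-scrambled-eggs" # child processes only see the scrambled value

# The child reads the secret from stdin; nothing is written to a file.
exec cat < <(echo "AS_ADMIN_MASTERPASSWORD=${MASTER_PASSWORD}")
```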
diff --git a/modules/container-configbaker/Dockerfile b/modules/container-configbaker/Dockerfile
index dae4a3aa272..351425a17ba 100644
--- a/modules/container-configbaker/Dockerfile
+++ b/modules/container-configbaker/Dockerfile
@@ -21,7 +21,7 @@ ENV SCRIPT_DIR="/scripts" \
 ENV PATH="${PATH}:${SCRIPT_DIR}" \
     BOOTSTRAP_DIR="${SCRIPT_DIR}/bootstrap"
 
-ARG APK_PACKAGES="curl bind-tools netcat-openbsd jq bash dumb-init wait4x ed postgresql-client"
+ARG APK_PACKAGES="curl bind-tools netcat-openbsd jq bash dumb-init wait4x ed postgresql-client aws-cli"
 
 RUN true && \
   # Install necessary software and tools
diff --git a/modules/dataverse-parent/pom.xml b/modules/dataverse-parent/pom.xml
index ee80ca6ed1d..8c0b0558b1e 100644
--- a/modules/dataverse-parent/pom.xml
+++ b/modules/dataverse-parent/pom.xml
@@ -198,7 +198,7 @@
         <pomchecker-maven-plugin.version>1.7.0</pomchecker-maven-plugin.version>
         
         <!-- Container related -->
-        <fabric8-dmp.version>0.43.4</fabric8-dmp.version>
+        <fabric8-dmp.version>0.45.0</fabric8-dmp.version>
     </properties>
     
     <pluginRepositories>
diff --git a/scripts/search/tests/data/dataset-finch3.json b/scripts/search/tests/data/dataset-finch3.json
new file mode 100644
index 00000000000..903b0aa124d
--- /dev/null
+++ b/scripts/search/tests/data/dataset-finch3.json
@@ -0,0 +1,102 @@
+{
+  "datasetVersion": {
+    "license": {
+      "name": "CC0 1.0",
+      "uri": "http://creativecommons.org/publicdomain/zero/1.0"
+    },
+    "metadataBlocks": {
+      "citation": {
+        "fields": [
+          {
+            "value": "HTML & More",
+            "typeClass": "primitive",
+            "multiple": false,
+            "typeName": "title"
+          },
+          {
+            "value": [
+              {
+                "authorName": {
+                  "value": "Markup, Marty",
+                  "typeClass": "primitive",
+                  "multiple": false,
+                  "typeName": "authorName"
+                },
+                "authorAffiliation": {
+                  "value": "W4C",
+                  "typeClass": "primitive",
+                  "multiple": false,
+                  "typeName": "authorAffiliation"
+                }
+              }
+            ],
+            "typeClass": "compound",
+            "multiple": true,
+            "typeName": "author"
+          },
+          {
+            "value": [
+              {
+                "datasetContactEmail": {
+                  "typeClass": "primitive",
+                  "multiple": false,
+                  "typeName": "datasetContactEmail",
+                  "value": "markup@mailinator.com"
+                },
+                "datasetContactName": {
+                  "typeClass": "primitive",
+                  "multiple": false,
+                  "typeName": "datasetContactName",
+                  "value": "Markup, Marty"
+                }
+              }
+            ],
+            "typeClass": "compound",
+            "multiple": true,
+            "typeName": "datasetContact"
+          },
+          {
+            "value": [
+              {
+                "dsDescriptionValue": {
+                  "value": "BEGIN<br></br>END",
+                  "multiple": false,
+                  "typeClass": "primitive",
+                  "typeName": "dsDescriptionValue"
+                },
+                "dsDescriptionDate": {
+                  "typeName": "dsDescriptionDate",
+                  "multiple": false,
+                  "typeClass": "primitive",
+                  "value": "2021-07-13"
+                }
+              }
+            ],
+            "typeClass": "compound",
+            "multiple": true,
+            "typeName": "dsDescription"
+          },
+          {
+            "value": [
+              "Medicine, Health and Life Sciences"
+            ],
+            "typeClass": "controlledVocabulary",
+            "multiple": true,
+            "typeName": "subject"
+          },
+          {
+            "typeName": "language",
+            "multiple": true,
+            "typeClass": "controlledVocabulary",
+            "value": [
+              "English",
+              "Afar",
+              "aar"
+            ]
+          }
+        ],
+        "displayName": "Citation Metadata"
+      }
+    }
+  }
+}
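
A fixture like the one added above is typically exercised by posting it to the native API; as a rough usage sketch (the server URL, API token, and the "root" collection alias below are placeholders, not taken from this patch):

```bash
# Placeholders: adjust SERVER_URL, API_TOKEN and the target collection alias for your installation.
export SERVER_URL=http://localhost:8080
export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx
curl -H "X-Dataverse-key:${API_TOKEN}" -X POST \
  "${SERVER_URL}/api/dataverses/root/datasets" \
  --upload-file scripts/search/tests/data/dataset-finch3.json
```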
diff --git a/src/main/docker/scripts/init_2_configure.sh b/src/main/docker/scripts/init_2_configure.sh
index b31cfac37b7..5c1075f01f3 100755
--- a/src/main/docker/scripts/init_2_configure.sh
+++ b/src/main/docker/scripts/init_2_configure.sh
@@ -22,17 +22,35 @@ if [ "${dataverse_files_storage__driver__id}" = "local" ]; then
   export dataverse_files_local_directory="${dataverse_files_local_directory:-${STORAGE_DIR}/store}"
 fi
 
-# 0. Define postboot commands file to be read by Payara and clear it
-DV_POSTBOOT=${PAYARA_DIR}/dataverse_postboot
-echo "# Dataverse postboot configuration for Payara" > "${DV_POSTBOOT}"
+# If reload is enabled via ENABLE_RELOAD=1, set the corresponding Jakarta Faces options
+ENABLE_RELOAD=${ENABLE_RELOAD:-0}
+if [ "${ENABLE_RELOAD}" = "1" ]; then
+  export DATAVERSE_JSF_PROJECT_STAGE=${DATAVERSE_JSF_PROJECT_STAGE:-"Development"}
+  export DATAVERSE_JSF_REFRESH_PERIOD=${DATAVERSE_JSF_REFRESH_PERIOD:-"0"}
+fi
+
+# Check prerequisites for commands handling
+if [ -z "$POSTBOOT_COMMANDS_FILE" ]; then echo "Variable POSTBOOT_COMMANDS_FILE is not set."; exit 1; fi
+# Test if postboot file is writeable for us, exit otherwise
+touch "$POSTBOOT_COMMANDS_FILE" || exit 1
+# Copy and split the postboot contents to manipulate them
+EXISTING_DEPLOY_COMMANDS=$(mktemp)
+NEW_POSTBOOT_COMMANDS=$(mktemp)
+grep -e "^deploy " "$POSTBOOT_COMMANDS_FILE" > "$EXISTING_DEPLOY_COMMANDS" || true
+grep -v -e "^deploy" "$POSTBOOT_COMMANDS_FILE" > "$NEW_POSTBOOT_COMMANDS" || true
 
-# 2. Domain-spaced resources (JDBC, JMS, ...)
-# TODO: This is ugly and dirty. It should be replaced with resources from
-#       EE 8 code annotations or at least glassfish-resources.xml
-# NOTE: postboot commands is not multi-line capable, thus spaghetti needed.
+function inject() {
+  if [ -z "$1" ]; then echo "No line specified"; exit 1; fi
+  # If the line is not yet in the file, try to add it
+  if ! grep -q "$1" "$NEW_POSTBOOT_COMMANDS"; then
+    # Check if the line is still not in the file when splitting at the first =
+    if ! grep -q "$(echo "$1" | cut -f1 -d"=")" "$NEW_POSTBOOT_COMMANDS"; then
+      echo "$1" >> "$NEW_POSTBOOT_COMMANDS"
+    fi
+  fi
+}
 
-# 3. Domain based configuration options
-# Set Dataverse environment variables
+# Domain based configuration options - set from Dataverse environment variables
 echo "INFO: Defining system properties for Dataverse configuration options."
 #env | grep -Ee "^(dataverse|doi)_" | sort -fd
 env -0 | grep -z -Ee "^(dataverse|doi)_" | while IFS='=' read -r -d '' k v; do
@@ -47,14 +65,12 @@ env -0 | grep -z -Ee "^(dataverse|doi)_" | while IFS='=' read -r -d '' k v; do
     v=$(echo "${v}" | sed -e 's/:/\\\:/g')
 
     echo "DEBUG: Handling ${KEY}=${v}."
-    echo "create-system-properties ${KEY}=${v}" >> "${DV_POSTBOOT}"
+    inject "create-system-properties ${KEY}=${v}"
 done
 
 # 4. Add the commands to the existing postboot file, but insert BEFORE deployment
-TMPFILE=$(mktemp)
-cat "${DV_POSTBOOT}" "${POSTBOOT_COMMANDS}" > "${TMPFILE}" && mv "${TMPFILE}" "${POSTBOOT_COMMANDS}"
+cat "$NEW_POSTBOOT_COMMANDS" "$EXISTING_DEPLOY_COMMANDS" > "${POSTBOOT_COMMANDS_FILE}"
 echo "DEBUG: postboot contains the following commands:"
 echo "--------------------------------------------------"
-cat "${POSTBOOT_COMMANDS}"
+cat "${POSTBOOT_COMMANDS_FILE}"
 echo "--------------------------------------------------"
-
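
The rewritten block above splits the existing postboot file into deploy lines and everything else, injects the new create-system-properties lines into the non-deploy part, and reassembles the file so configuration always precedes deployment. A scratch-file sketch of that reorder step (the deploy path and property value are illustrative, not from this patch):

```bash
#!/usr/bin/env bash
# Sketch of the split-and-reassemble step above; paths and property values are illustrative.
POSTBOOT_COMMANDS_FILE=$(mktemp)
echo "deploy /opt/payara/deployments/dataverse.war" > "$POSTBOOT_COMMANDS_FILE"

EXISTING_DEPLOY_COMMANDS=$(mktemp)
NEW_POSTBOOT_COMMANDS=$(mktemp)
grep -e "^deploy " "$POSTBOOT_COMMANDS_FILE" > "$EXISTING_DEPLOY_COMMANDS" || true
grep -v -e "^deploy" "$POSTBOOT_COMMANDS_FILE" > "$NEW_POSTBOOT_COMMANDS" || true

echo "create-system-properties dataverse.fqdn=localhost" >> "$NEW_POSTBOOT_COMMANDS"

# Configuration first, deployment last:
cat "$NEW_POSTBOOT_COMMANDS" "$EXISTING_DEPLOY_COMMANDS" > "$POSTBOOT_COMMANDS_FILE"
cat "$POSTBOOT_COMMANDS_FILE"
```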
diff --git a/src/main/docker/scripts/init_3_wait_dataverse_db_host.sh b/src/main/docker/scripts/init_3_wait_dataverse_db_host.sh
index c234ad33307..06b41d60507 100644
--- a/src/main/docker/scripts/init_3_wait_dataverse_db_host.sh
+++ b/src/main/docker/scripts/init_3_wait_dataverse_db_host.sh
@@ -1,4 +1,4 @@
 #It was reported on 9949 that on the first launch of the containers Dataverse would not be deployed on Payara.
 #This was caused by a race condition due to postgres not being ready. A solution for docker compose was prepared
 #but didn't work due to a compatibility issue with the Maven plugin [https://github.com/fabric8io/docker-maven-plugin/issues/888]
-wait-for "${DATAVERSE_DB_HOST:-postgres}:${DATAVERSE_DB_PORT:-5432}" -t 120
\ No newline at end of file
+wait4x tcp "${DATAVERSE_DB_HOST:-postgres}:${DATAVERSE_DB_PORT:-5432}" -t 120s
diff --git a/src/main/java/edu/harvard/iq/dataverse/DataCitation.java b/src/main/java/edu/harvard/iq/dataverse/DataCitation.java
index a012175deae..3977023fc4b 100644
--- a/src/main/java/edu/harvard/iq/dataverse/DataCitation.java
+++ b/src/main/java/edu/harvard/iq/dataverse/DataCitation.java
@@ -15,6 +15,7 @@
 import java.io.OutputStream;
 import java.io.OutputStreamWriter;
 import java.io.Writer;
+import java.nio.charset.StandardCharsets;
 import java.text.SimpleDateFormat;
 import java.util.ArrayList;
 import java.util.Date;
@@ -253,7 +254,7 @@ public String toBibtexString() {
     
     public void writeAsBibtexCitation(OutputStream os) throws IOException {
         // Use UTF-8
-        Writer out = new BufferedWriter(new OutputStreamWriter(os, "utf-8"));
+        Writer out = new BufferedWriter(new OutputStreamWriter(os, StandardCharsets.UTF_8));
         if(getFileTitle() !=null && isDirect()) {
             out.write("@incollection{");
         } else {
@@ -317,7 +318,7 @@ public String toRISString() {
 
     public void writeAsRISCitation(OutputStream os) throws IOException {
         // Use UTF-8
-        Writer out = new BufferedWriter(new OutputStreamWriter(os, "utf-8"));
+        Writer out = new BufferedWriter(new OutputStreamWriter(os, StandardCharsets.UTF_8));
         out.write("Provider: " + publisher + "\r\n");
         out.write("Content: text/plain; charset=\"utf-8\"" + "\r\n");
         // Using type "DATA" - see https://github.com/IQSS/dataverse/issues/4816
diff --git a/src/main/java/edu/harvard/iq/dataverse/DataFileServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DataFileServiceBean.java
index 21f925f8981..9331ec67d12 100644
--- a/src/main/java/edu/harvard/iq/dataverse/DataFileServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/DataFileServiceBean.java
@@ -1248,14 +1248,6 @@ public List<Long> selectFilesWithMissingOriginalSizes() {
     }
     
 
-    /**
-     * Check that a identifier entered by the user is unique (not currently used
-     * for any other study in this Dataverse Network). Also check for duplicate
-     * in the remote PID service if needed
-     * @param datafileId
-     * @param storageLocation
-     * @return  {@code true} iff the global identifier is unique.
-     */
     public void finalizeFileDelete(Long dataFileId, String storageLocation) throws IOException {
         // Verify that the DataFile no longer exists: 
         if (find(dataFileId) != null) {
diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetField.java b/src/main/java/edu/harvard/iq/dataverse/DatasetField.java
index 31e7758c7d5..4bf6c00f199 100644
--- a/src/main/java/edu/harvard/iq/dataverse/DatasetField.java
+++ b/src/main/java/edu/harvard/iq/dataverse/DatasetField.java
@@ -54,15 +54,18 @@ public int compare(DatasetField o1, DatasetField o2) {
                                     o2.getDatasetFieldType().getDisplayOrder() );
     }};
 
-    public static DatasetField createNewEmptyDatasetField(DatasetFieldType dsfType, Object dsv) {
+    public static DatasetField createNewEmptyDatasetField(DatasetFieldType dsfType, DatasetVersion dsv) {
         
         DatasetField dsfv = createNewEmptyDatasetField(dsfType);
-        //TODO - a better way to handle this?
-        if (dsv.getClass().getName().equals("edu.harvard.iq.dataverse.DatasetVersion")){
-                   dsfv.setDatasetVersion((DatasetVersion)dsv); 
-        } else {
-            dsfv.setTemplate((Template)dsv);
-        }
+        dsfv.setDatasetVersion(dsv);
+
+        return dsfv;
+    }
+
+    public static DatasetField createNewEmptyDatasetField(DatasetFieldType dsfType, Template dsv) {
+        
+        DatasetField dsfv = createNewEmptyDatasetField(dsfType);
+        dsfv.setTemplate(dsv);
 
         return dsfv;
     }
@@ -545,9 +548,12 @@ public String toString() {
         return "edu.harvard.iq.dataverse.DatasetField[ id=" + id + " ]";
     }
 
-    public DatasetField copy(Object version) {
+    public DatasetField copy(DatasetVersion version) {
         return copy(version, null);
     }
+    public DatasetField copy(Template template) {
+        return copy(template, null);
+    }
     
     // originally this was an overloaded method, but we renamed it to get around an issue with Bean Validation
     // (that looked at overloaded methods, when it meant to look at overridden methods)
@@ -555,15 +561,15 @@ public DatasetField copyChild(DatasetFieldCompoundValue parent) {
         return copy(null, parent);
     }
 
-    private DatasetField copy(Object version, DatasetFieldCompoundValue parent) {
+    private DatasetField copy(Object versionOrTemplate, DatasetFieldCompoundValue parent) {
         DatasetField dsf = new DatasetField();
         dsf.setDatasetFieldType(datasetFieldType);
         
-        if (version != null) {
-            if (version.getClass().getName().equals("edu.harvard.iq.dataverse.DatasetVersion")) {
-                dsf.setDatasetVersion((DatasetVersion) version);               
+        if (versionOrTemplate != null) {
+            if (versionOrTemplate instanceof DatasetVersion) {
+                dsf.setDatasetVersion((DatasetVersion) versionOrTemplate);               
             } else {
-                dsf.setTemplate((Template) version);
+                dsf.setTemplate((Template) versionOrTemplate);
             }
         }
         
diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetFieldServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DatasetFieldServiceBean.java
index 34595728fa7..1aaf2534048 100644
--- a/src/main/java/edu/harvard/iq/dataverse/DatasetFieldServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/DatasetFieldServiceBean.java
@@ -39,6 +39,7 @@
 import jakarta.persistence.PersistenceException;
 import jakarta.persistence.TypedQuery;
 
+import jakarta.persistence.criteria.*;
 import org.apache.commons.codec.digest.DigestUtils;
 import org.apache.commons.httpclient.HttpException;
 import org.apache.commons.lang3.StringUtils;
@@ -851,4 +852,125 @@ public String getFieldLanguage(String languages, String localeCode) {
         }
         return null;
     }
+
+    public List<DatasetFieldType> findAllDisplayedOnCreateInMetadataBlock(MetadataBlock metadataBlock) {
+        CriteriaBuilder criteriaBuilder = em.getCriteriaBuilder();
+        CriteriaQuery<DatasetFieldType> criteriaQuery = criteriaBuilder.createQuery(DatasetFieldType.class);
+
+        Root<MetadataBlock> metadataBlockRoot = criteriaQuery.from(MetadataBlock.class);
+        Root<DatasetFieldType> datasetFieldTypeRoot = criteriaQuery.from(DatasetFieldType.class);
+
+        Predicate requiredInDataversePredicate = buildRequiredInDataversePredicate(criteriaBuilder, datasetFieldTypeRoot);
+
+        criteriaQuery.where(
+                criteriaBuilder.and(
+                        criteriaBuilder.equal(metadataBlockRoot.get("id"), metadataBlock.getId()),
+                        datasetFieldTypeRoot.in(metadataBlockRoot.get("datasetFieldTypes")),
+                        criteriaBuilder.or(
+                                criteriaBuilder.isTrue(datasetFieldTypeRoot.get("displayOnCreate")),
+                                requiredInDataversePredicate
+                        )
+                )
+        );
+
+        criteriaQuery.select(datasetFieldTypeRoot).distinct(true);
+
+        TypedQuery<DatasetFieldType> typedQuery = em.createQuery(criteriaQuery);
+        return typedQuery.getResultList();
+    }
+
+    public List<DatasetFieldType> findAllInMetadataBlockAndDataverse(MetadataBlock metadataBlock, Dataverse dataverse, boolean onlyDisplayedOnCreate) {
+        CriteriaBuilder criteriaBuilder = em.getCriteriaBuilder();
+        CriteriaQuery<DatasetFieldType> criteriaQuery = criteriaBuilder.createQuery(DatasetFieldType.class);
+
+        Root<MetadataBlock> metadataBlockRoot = criteriaQuery.from(MetadataBlock.class);
+        Root<DatasetFieldType> datasetFieldTypeRoot = criteriaQuery.from(DatasetFieldType.class);
+        Root<Dataverse> dataverseRoot = criteriaQuery.from(Dataverse.class);
+
+        // Join Dataverse with DataverseFieldTypeInputLevel on the "dataverseFieldTypeInputLevels" attribute, using a LEFT JOIN.
+        Join<Dataverse, DataverseFieldTypeInputLevel> datasetFieldTypeInputLevelJoin = dataverseRoot.join("dataverseFieldTypeInputLevels", JoinType.LEFT);
+
+        // Define a predicate to include DatasetFieldTypes that are marked as included in the input level.
+        Predicate includedAsInputLevelPredicate = criteriaBuilder.and(
+                criteriaBuilder.equal(datasetFieldTypeRoot, datasetFieldTypeInputLevelJoin.get("datasetFieldType")),
+                criteriaBuilder.isTrue(datasetFieldTypeInputLevelJoin.get("include"))
+        );
+
+        // Define a predicate to include DatasetFieldTypes that are marked as required in the input level.
+        Predicate requiredAsInputLevelPredicate = criteriaBuilder.and(
+                criteriaBuilder.equal(datasetFieldTypeRoot, datasetFieldTypeInputLevelJoin.get("datasetFieldType")),
+                criteriaBuilder.isTrue(datasetFieldTypeInputLevelJoin.get("required"))
+        );
+
+        // Create a subquery to check for the absence of a specific DataverseFieldTypeInputLevel.
+        Subquery<Long> subquery = criteriaQuery.subquery(Long.class);
+        Root<DataverseFieldTypeInputLevel> subqueryRoot = subquery.from(DataverseFieldTypeInputLevel.class);
+        subquery.select(criteriaBuilder.literal(1L))
+                .where(
+                        criteriaBuilder.equal(subqueryRoot.get("dataverse"), dataverseRoot),
+                        criteriaBuilder.equal(subqueryRoot.get("datasetFieldType"), datasetFieldTypeRoot)
+                );
+
+        // Define a predicate to match DatasetFieldTypes that have no associated input level (i.e., the subquery returns no result).
+        Predicate hasNoInputLevelPredicate = criteriaBuilder.not(criteriaBuilder.exists(subquery));
+
+        // Define a predicate to include the required fields in Dataverse.
+        Predicate requiredInDataversePredicate = buildRequiredInDataversePredicate(criteriaBuilder, datasetFieldTypeRoot);
+
+        // Define a predicate for displaying DatasetFieldTypes on create.
+        // If onlyDisplayedOnCreate is true, include fields that:
+        // - Are either marked as displayed on create OR marked as required, OR
+        // - Are required according to the input level.
+        // Otherwise, use an always-true predicate (conjunction).
+        Predicate displayedOnCreatePredicate = onlyDisplayedOnCreate
+                ? criteriaBuilder.or(
+                criteriaBuilder.or(
+                        criteriaBuilder.isTrue(datasetFieldTypeRoot.get("displayOnCreate")),
+                        requiredInDataversePredicate
+                ),
+                requiredAsInputLevelPredicate
+        )
+                : criteriaBuilder.conjunction();
+
+        // Build the final WHERE clause by combining all the predicates.
+        criteriaQuery.where(
+                criteriaBuilder.equal(dataverseRoot.get("id"), dataverse.getId()), // Match the Dataverse ID.
+                criteriaBuilder.equal(metadataBlockRoot.get("id"), metadataBlock.getId()), // Match the MetadataBlock ID.
+                metadataBlockRoot.in(dataverseRoot.get("metadataBlocks")), // Ensure the MetadataBlock is part of the Dataverse.
+                datasetFieldTypeRoot.in(metadataBlockRoot.get("datasetFieldTypes")), // Ensure the DatasetFieldType is part of the MetadataBlock.
+                criteriaBuilder.or(includedAsInputLevelPredicate, hasNoInputLevelPredicate), // Include DatasetFieldTypes based on the input level predicates.
+                displayedOnCreatePredicate // Apply the display-on-create filter if necessary.
+        );
+
+        criteriaQuery.select(datasetFieldTypeRoot).distinct(true);
+
+        return em.createQuery(criteriaQuery).getResultList();
+    }
+
+    private Predicate buildRequiredInDataversePredicate(CriteriaBuilder criteriaBuilder, Root<DatasetFieldType> datasetFieldTypeRoot) {
+        // Predicate to check if the current DatasetFieldType is required.
+        Predicate isRequired = criteriaBuilder.isTrue(datasetFieldTypeRoot.get("required"));
+
+        // Subquery to check if the parentDatasetFieldType is required or null.
+        // We need this check to avoid including conditionally required fields.
+        Subquery<Boolean> subquery = criteriaBuilder.createQuery(Boolean.class).subquery(Boolean.class);
+        Root<DatasetFieldType> parentRoot = subquery.from(DatasetFieldType.class);
+
+        subquery.select(criteriaBuilder.literal(true))
+                .where(
+                        criteriaBuilder.equal(parentRoot, datasetFieldTypeRoot.get("parentDatasetFieldType")),
+                        criteriaBuilder.or(
+                                criteriaBuilder.isNull(parentRoot.get("required")),
+                                criteriaBuilder.isTrue(parentRoot.get("required"))
+                        )
+                );
+
+        // Predicate to check that either the parentDatasetFieldType meets the condition or doesn't exist (is null).
+        Predicate parentCondition = criteriaBuilder.or(
+                criteriaBuilder.exists(subquery),
+                criteriaBuilder.isNull(datasetFieldTypeRoot.get("parentDatasetFieldType"))
+        );
+
+        return criteriaBuilder.and(isRequired, parentCondition);
+    }
 }
diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java b/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java
index eae4a9f2977..8522f2733c7 100644
--- a/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java
+++ b/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java
@@ -118,6 +118,7 @@
 import edu.harvard.iq.dataverse.engine.command.exception.IllegalCommandException;
 import edu.harvard.iq.dataverse.engine.command.impl.AbstractSubmitToArchiveCommand;
 import edu.harvard.iq.dataverse.engine.command.impl.CreateNewDatasetCommand;
+import edu.harvard.iq.dataverse.engine.command.impl.DeleteDatasetLinkingDataverseCommand;
 import edu.harvard.iq.dataverse.engine.command.impl.GetLatestPublishedDatasetVersionCommand;
 import edu.harvard.iq.dataverse.engine.command.impl.RequestRsyncScriptCommand;
 import edu.harvard.iq.dataverse.engine.command.impl.PublishDatasetResult;
@@ -3562,6 +3563,16 @@ public void saveLinkingDataverses(ActionEvent evt) {
         }
         alreadyLinkedDataverses = null; //force update to list of linked dataverses
     }
+    public void deleteLinkingDataverses(ActionEvent evt) {
+
+        if (deleteLink(selectedDataverseForLinking)) {
+            JsfHelper.addSuccessMessage(BundleUtil.getStringFromBundle("dataset.message.unlinkSuccess", getSuccessMessageArguments()));
+        } else {
+            FacesMessage message = new FacesMessage(FacesMessage.SEVERITY_INFO, BundleUtil.getStringFromBundle("dataset.notlinked"), linkingDataverseErrorMessage);
+            FacesContext.getCurrentInstance().addMessage(null, message);
+        }
+        alreadyLinkedDataverses = null; //force update to list of linked dataverses
+    }
 
     private String linkingDataverseErrorMessage = "";
 
@@ -3596,6 +3607,25 @@ private Boolean saveLink(Dataverse dataverse){
         }
         return retVal;
     }
+    private Boolean deleteLink(Dataverse dataverse){
+        boolean retVal = true;
+        linkingDataverse = dataverse;
+        try {
+            DatasetLinkingDataverse dsld = dsLinkingService.findDatasetLinkingDataverse(dataset.getId(), linkingDataverse.getId());
+            DeleteDatasetLinkingDataverseCommand cmd = new DeleteDatasetLinkingDataverseCommand(dvRequestService.getDataverseRequest(), dataset, dsld, true);
+            commandEngine.submit(cmd);
+        } catch (CommandException ex) {
+            String msg = "There was a problem removing the link between this dataset to yours: " + ex;
+            logger.severe(msg);
+            msg = BundleUtil.getStringFromBundle("dataset.notlinked.msg") + ex;
+            /**
+             * @todo how do we get this message to show up in the GUI?
+             */
+            linkingDataverseErrorMessage = msg;
+            retVal = false;
+        }
+        return retVal;
+    }
         
     private String alreadyLinkedDataverses = null;
     
@@ -3622,6 +3652,14 @@ public List<Dataverse> completeLinkingDataverse(String query) {
             return null;
         }
     }
+    public List<Dataverse> completeUnLinkingDataverse(String query) {
+        dataset = datasetService.find(dataset.getId());
+        if (session.getUser().isAuthenticated()) {
+            return dataverseService.filterDataversesForUnLinking(query, dvRequestService.getDataverseRequest(), dataset);
+        } else {
+            return null;
+        }
+    }
 
     public List<Dataverse> completeHostDataverseMenuList(String query) {
         if (session.getUser().isAuthenticated()) {
@@ -5580,12 +5618,19 @@ public void setPrivateUrlJustCreatedToFalse() {
     public boolean isShowLinkingPopup() {
         return showLinkingPopup;
     }
+    public boolean isShowUnLinkingPopup() {
+        return showUnLinkingPopup;
+    }
 
     public void setShowLinkingPopup(boolean showLinkingPopup) {
         this.showLinkingPopup = showLinkingPopup;
     }
+    public void setShowUnLinkingPopup(boolean showUnLinkingPopup) {
+        this.showUnLinkingPopup = showUnLinkingPopup;
+    }
 
     private boolean showLinkingPopup = false;
+    private boolean showUnLinkingPopup = false;
     private Boolean anonymizedAccess = null;
 
     //
diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetVersionServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DatasetVersionServiceBean.java
index f99b3ee1b53..8ddc9545763 100644
--- a/src/main/java/edu/harvard/iq/dataverse/DatasetVersionServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/DatasetVersionServiceBean.java
@@ -316,8 +316,8 @@ private void msg(String s){
     }
     
     public boolean isVersionDefaultCustomTerms(DatasetVersion datasetVersion) {
-
-        if (datasetVersion.getId() != null) {
+        //SEK - belt and suspenders here, but this is where the bug 10719 first manifested
+        if (datasetVersion != null && datasetVersion.getId() != null) {
             try {
                 TermsOfUseAndAccess toua = (TermsOfUseAndAccess) em.createNamedQuery("TermsOfUseAndAccess.findByDatasetVersionIdAndDefaultTerms")
                         .setParameter("id", datasetVersion.getId()).setParameter("defaultTerms", TermsOfUseAndAccess.DEFAULT_NOTERMS).getSingleResult();
diff --git a/src/main/java/edu/harvard/iq/dataverse/Dataverse.java b/src/main/java/edu/harvard/iq/dataverse/Dataverse.java
index 978c716e058..86e2e0207c1 100644
--- a/src/main/java/edu/harvard/iq/dataverse/Dataverse.java
+++ b/src/main/java/edu/harvard/iq/dataverse/Dataverse.java
@@ -30,7 +30,6 @@
 import jakarta.persistence.OneToOne;
 import jakarta.persistence.OrderBy;
 import jakarta.persistence.Table;
-import jakarta.persistence.Transient;
 import jakarta.validation.constraints.NotNull;
 import jakarta.validation.constraints.Pattern;
 import jakarta.validation.constraints.Size;
@@ -593,7 +592,7 @@ public void setDataverseTheme(DataverseTheme dataverseTheme) {
     }
 
     public void setMetadataBlocks(List<MetadataBlock> metadataBlocks) {
-        this.metadataBlocks = metadataBlocks;
+        this.metadataBlocks = new ArrayList<>(metadataBlocks);
     }
 
     public List<DatasetFieldType> getCitationDatasetFieldTypes() {
@@ -834,4 +833,17 @@ public boolean isAncestorOf( DvObject other ) {
     public String getLocalURL() {
         return  SystemConfig.getDataverseSiteUrlStatic() + "/dataverse/" + this.getAlias();
     }
+
+    public void addInputLevelsMetadataBlocksIfNotPresent(List<DataverseFieldTypeInputLevel> inputLevels) {
+        for (DataverseFieldTypeInputLevel inputLevel : inputLevels) {
+            MetadataBlock inputLevelMetadataBlock = inputLevel.getDatasetFieldType().getMetadataBlock();
+            if (!hasMetadataBlock(inputLevelMetadataBlock)) {
+                metadataBlocks.add(inputLevelMetadataBlock);
+            }
+        }
+    }
+
+    private boolean hasMetadataBlock(MetadataBlock metadataBlock) {
+        return metadataBlocks.stream().anyMatch(block -> block.getId().equals(metadataBlock.getId()));
+    }
 }
diff --git a/src/main/java/edu/harvard/iq/dataverse/DataverseServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DataverseServiceBean.java
index 10b5d800c21..8c3cc1b2e7e 100644
--- a/src/main/java/edu/harvard/iq/dataverse/DataverseServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/DataverseServiceBean.java
@@ -22,7 +22,7 @@
 import edu.harvard.iq.dataverse.storageuse.StorageQuota;
 import edu.harvard.iq.dataverse.util.StringUtil;
 import edu.harvard.iq.dataverse.util.SystemConfig;
-import edu.harvard.iq.dataverse.util.json.JsonUtil;
+
 import java.io.File;
 import java.io.IOException;
 import java.sql.Timestamp;
@@ -34,6 +34,7 @@
 import java.util.logging.Logger;
 import java.util.Properties;
 
+import edu.harvard.iq.dataverse.validation.JSONDataValidation;
 import jakarta.ejb.EJB;
 import jakarta.ejb.Stateless;
 import jakarta.inject.Inject;
@@ -503,7 +504,19 @@ public List<Dataverse> filterDataversesForLinking(String query, DataverseRequest
 
         return dataverseList;
     }
-    
+    public List<Dataverse> filterDataversesForUnLinking(String query, DataverseRequest req, Dataset dataset) {
+        List<Object> alreadyLinkeddv_ids = em.createNativeQuery("SELECT linkingdataverse_id FROM datasetlinkingdataverse WHERE dataset_id = " + dataset.getId()).getResultList();
+        List<Dataverse> dataverseList = new ArrayList<>();
+        if (alreadyLinkeddv_ids != null && !alreadyLinkeddv_ids.isEmpty()) {
+            alreadyLinkeddv_ids.stream().map((testDVId) -> this.find(testDVId)).forEachOrdered((dataverse) -> {
+                if (this.permissionService.requestOn(req, dataverse).has(Permission.PublishDataset)) {
+                    dataverseList.add(dataverse);
+                }
+            });
+        }
+        return dataverseList;
+    }
+
     public List<Dataverse> filterDataversesForHosting(String pattern, DataverseRequest req) {
 
         // Find the dataverses matching the search parameters: 
@@ -888,14 +901,16 @@ public List<Object[]> getDatasetTitlesWithinDataverse(Long dataverseId) {
         return em.createNativeQuery(cqString).getResultList();
     }
 
-        
     public  String getCollectionDatasetSchema(String dataverseAlias) {
+        return getCollectionDatasetSchema(dataverseAlias, null);
+    }
+    public  String getCollectionDatasetSchema(String dataverseAlias, Map<String, Map<String,List<String>>> schemaChildMap) {
         
         Dataverse testDV = this.findByAlias(dataverseAlias);
         
         while (!testDV.isMetadataBlockRoot()) {
             if (testDV.getOwner() == null) {
-                break; // we are at the root; which by defintion is metadata blcok root, regarldess of the value
+                break; // we are at the root; which by definition is metadata block root, regardless of the value
             }
             testDV = testDV.getOwner();
         }
@@ -932,6 +947,8 @@ public  String getCollectionDatasetSchema(String dataverseAlias) {
                         dsft.setRequiredDV(dsft.isRequired());
                         dsft.setInclude(true);
                     }
+                    List<String> childrenRequired = new ArrayList<>();
+                    List<String> childrenAllowed = new ArrayList<>();
                     if (dsft.isHasChildren()) {
                         for (DatasetFieldType child : dsft.getChildDatasetFieldTypes()) {
                             DataverseFieldTypeInputLevel dsfIlChild = dataverseFieldTypeInputLevelService.findByDataverseIdDatasetFieldTypeId(testDV.getId(), child.getId());
@@ -944,8 +961,18 @@ public  String getCollectionDatasetSchema(String dataverseAlias) {
                                 child.setRequiredDV(child.isRequired() && dsft.isRequired());
                                 child.setInclude(true);
                             }
+                            if (child.isRequired()) {
+                                childrenRequired.add(child.getName());
+                            }
+                            childrenAllowed.add(child.getName());
                         }
                     }
+                    if (schemaChildMap != null) {
+                        Map<String, List<String>> map = new HashMap<>();
+                        map.put("required", childrenRequired);
+                        map.put("allowed", childrenAllowed);
+                        schemaChildMap.put(dsft.getName(), map);
+                    }
                     if(dsft.isRequiredDV()){
                         requiredDSFT.add(dsft);
                     }
@@ -1021,11 +1048,13 @@ private String getCustomMDBSchema (MetadataBlock mdb, List<DatasetFieldType> req
     }
     
     public String isDatasetJsonValid(String dataverseAlias, String jsonInput) {
-        JSONObject rawSchema = new JSONObject(new JSONTokener(getCollectionDatasetSchema(dataverseAlias)));
+        Map<String, Map<String,List<String>>> schemaChildMap = new HashMap<>();
+        JSONObject rawSchema = new JSONObject(new JSONTokener(getCollectionDatasetSchema(dataverseAlias, schemaChildMap)));
         
-        try {               
+        try {
             Schema schema = SchemaLoader.load(rawSchema);
             schema.validate(new JSONObject(jsonInput)); // throws a ValidationException if this object is invalid
+            JSONDataValidation.validate(schema, schemaChildMap, jsonInput); // throws a ValidationException if any objects are invalid
         } catch (ValidationException vx) {
             logger.info(BundleUtil.getStringFromBundle("dataverses.api.validate.json.failed") + " " + vx.getErrorMessage()); 
             String accumulatedexceptions = "";
diff --git a/src/main/java/edu/harvard/iq/dataverse/Shib.java b/src/main/java/edu/harvard/iq/dataverse/Shib.java
index f9cf061e771..a3dfbf81512 100644
--- a/src/main/java/edu/harvard/iq/dataverse/Shib.java
+++ b/src/main/java/edu/harvard/iq/dataverse/Shib.java
@@ -19,6 +19,7 @@
 import org.apache.commons.lang3.StringUtils;
 
 import java.io.IOException;
+import java.nio.charset.StandardCharsets;
 import java.sql.Timestamp;
 import java.util.ArrayList;
 import java.util.Arrays;
@@ -458,9 +459,9 @@ private String getRequiredValueFromAssertion(String key) throws Exception {
         if (attributeValue.isEmpty()) {
             throw new Exception(key + " was empty");
         }
-		if(systemConfig.isShibAttributeCharacterSetConversionEnabled()) {
-			attributeValue= new String( attributeValue.getBytes("ISO-8859-1"), "UTF-8");
-		}
+        if (systemConfig.isShibAttributeCharacterSetConversionEnabled()) {
+            attributeValue= new String( attributeValue.getBytes(StandardCharsets.ISO_8859_1), StandardCharsets.UTF_8);
+        }
         String trimmedValue = attributeValue.trim();
         logger.fine("The SAML assertion for \"" + key + "\" (required) was \"" + attributeValue + "\" and was trimmed to \"" + trimmedValue + "\".");
         return trimmedValue;
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Access.java b/src/main/java/edu/harvard/iq/dataverse/api/Access.java
index 00da4990996..16ac884180b 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/Access.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/Access.java
@@ -286,7 +286,7 @@ private DataFile findDataFileOrDieWrapper(String fileId){
     @GET
     @AuthRequired
     @Path("datafile/{fileId:.+}")
-    @Produces({"application/xml"})
+    @Produces({"application/xml","*/*"})
     public Response datafile(@Context ContainerRequestContext crc, @PathParam("fileId") String fileId, @QueryParam("gbrecs") boolean gbrecs, @Context UriInfo uriInfo, @Context HttpHeaders headers, @Context HttpServletResponse response) /*throws NotFoundException, ServiceUnavailableException, PermissionDeniedException, AuthorizationRequiredException*/ {
         
         // check first if there's a trailing slash, and chop it: 
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Admin.java b/src/main/java/edu/harvard/iq/dataverse/api/Admin.java
index d60884bad2f..550ad1b3043 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/Admin.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/Admin.java
@@ -65,6 +65,7 @@
 
 import java.io.InputStream;
 import java.io.StringReader;
+import java.nio.charset.StandardCharsets;
 import java.util.Map;
 import java.util.Map.Entry;
 import java.util.logging.Level;
@@ -1153,7 +1154,7 @@ public void write(OutputStream os) throws IOException,
                         os.write(",\n".getBytes());
                     }
 
-                    os.write(output.build().toString().getBytes("UTF8"));
+                    os.write(output.build().toString().getBytes(StandardCharsets.UTF_8));
                     
                     if (!wroteObject) {
                         wroteObject = true;
@@ -1267,7 +1268,7 @@ public void write(OutputStream os) throws IOException,
                         os.write(",\n".getBytes());
                     }
 
-                    os.write(output.build().toString().getBytes("UTF8"));
+                    os.write(output.build().toString().getBytes(StandardCharsets.UTF_8));
                     
                     if (!wroteObject) {
                         wroteObject = true;
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/ApiBlockingFilter.java b/src/main/java/edu/harvard/iq/dataverse/api/ApiBlockingFilter.java
index 0e5b8226310..b51b1aa2612 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/ApiBlockingFilter.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/ApiBlockingFilter.java
@@ -49,7 +49,7 @@ public void doBlock(ServletRequest sr, ServletResponse sr1, FilterChain fc) thro
         @Override
         public void doBlock(ServletRequest sr, ServletResponse sr1, FilterChain fc) throws IOException, ServletException {
             HttpServletResponse httpResponse = (HttpServletResponse) sr1;
-            httpResponse.getWriter().println("{ status:\"error\", message:\"Endpoint blocked. Please contact the dataverse administrator\"}" );
+            httpResponse.getWriter().println("{ \"status\":\"error\", \"message\":\"Endpoint blocked. Please contact the dataverse administrator\"}" );
             httpResponse.setStatus(HttpServletResponse.SC_SERVICE_UNAVAILABLE);
             httpResponse.setContentType("application/json");
         }
@@ -67,7 +67,7 @@ public void doBlock(ServletRequest sr, ServletResponse sr1, FilterChain fc) thro
                 fc.doFilter(sr, sr1);
             } else {
                 HttpServletResponse httpResponse = (HttpServletResponse) sr1;
-                httpResponse.getWriter().println("{ status:\"error\", message:\"Endpoint available from localhost only. Please contact the dataverse administrator\"}" );
+                httpResponse.getWriter().println("{ \"status\":\"error\", \"message\":\"Endpoint available from localhost only. Please contact the dataverse administrator\"}" );
                 httpResponse.setStatus(HttpServletResponse.SC_SERVICE_UNAVAILABLE);
                 httpResponse.setContentType("application/json");
             }
@@ -102,7 +102,7 @@ public void doBlock(ServletRequest sr, ServletResponse sr1, FilterChain fc) thro
             
             if ( block ) {
                 HttpServletResponse httpResponse = (HttpServletResponse) sr1;
-                httpResponse.getWriter().println("{ status:\"error\", message:\"Endpoint available using API key only. Please contact the dataverse administrator\"}" );
+                httpResponse.getWriter().println("{ \"status\":\"error\", \"message\":\"Endpoint available using API key only. Please contact the dataverse administrator\"}" );
                 httpResponse.setStatus(HttpServletResponse.SC_SERVICE_UNAVAILABLE);
                 httpResponse.setContentType("application/json");
             } else {
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/DatasetFields.java b/src/main/java/edu/harvard/iq/dataverse/api/DatasetFields.java
new file mode 100644
index 00000000000..2ec35c896d9
--- /dev/null
+++ b/src/main/java/edu/harvard/iq/dataverse/api/DatasetFields.java
@@ -0,0 +1,29 @@
+package edu.harvard.iq.dataverse.api;
+
+import edu.harvard.iq.dataverse.DatasetFieldServiceBean;
+import edu.harvard.iq.dataverse.DatasetFieldType;
+import jakarta.ejb.EJB;
+import jakarta.ws.rs.*;
+import jakarta.ws.rs.core.Response;
+
+import java.util.List;
+
+import static edu.harvard.iq.dataverse.util.json.JsonPrinter.jsonDatasetFieldTypes;
+
+/**
+ * Api bean for managing dataset fields.
+ */
+@Path("datasetfields")
+@Produces("application/json")
+public class DatasetFields extends AbstractApiBean {
+
+    @EJB
+    DatasetFieldServiceBean datasetFieldService;
+
+    @GET
+    @Path("facetables")
+    public Response listAllFacetableDatasetFields() {
+        List<DatasetFieldType> datasetFieldTypes = datasetFieldService.findAllFacetableFieldTypes();
+        return ok(jsonDatasetFieldTypes(datasetFieldTypes));
+    }
+}
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java
index 4b919c5ed82..ec51bb2c27f 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java
@@ -1,7 +1,6 @@
 package edu.harvard.iq.dataverse.api;
 
 import com.amazonaws.services.s3.model.PartETag;
-
 import edu.harvard.iq.dataverse.*;
 import edu.harvard.iq.dataverse.DatasetLock.Reason;
 import edu.harvard.iq.dataverse.actionlogging.ActionLogRecord;
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java b/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java
index 02b60fdb32a..ed2a8db5e06 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java
@@ -29,6 +29,7 @@
 import edu.harvard.iq.dataverse.util.ConstraintViolationUtil;
 import edu.harvard.iq.dataverse.util.StringUtil;
 import static edu.harvard.iq.dataverse.util.StringUtil.nonEmpty;
+import static edu.harvard.iq.dataverse.util.json.JsonPrinter.*;
 
 import edu.harvard.iq.dataverse.util.json.JSONLDUtil;
 import edu.harvard.iq.dataverse.util.json.JsonParseException;
@@ -60,8 +61,7 @@
 import jakarta.ws.rs.core.MediaType;
 import jakarta.ws.rs.core.Response;
 import jakarta.ws.rs.core.Response.Status;
-import static edu.harvard.iq.dataverse.util.json.JsonPrinter.toJsonArray;
-import static edu.harvard.iq.dataverse.util.json.JsonPrinter.json;
+
 import java.io.IOException;
 import java.io.OutputStream;
 import java.text.MessageFormat;
@@ -111,6 +111,9 @@ public class Dataverses extends AbstractApiBean {
 
     @EJB
     SwordServiceBean swordService;
+
+    @EJB
+    PermissionServiceBean permissionService;
     
     @POST
     @AuthRequired
@@ -123,35 +126,47 @@ public Response addRoot(@Context ContainerRequestContext crc, String body) {
     @AuthRequired
     @Path("{identifier}")
     public Response addDataverse(@Context ContainerRequestContext crc, String body, @PathParam("identifier") String parentIdtf) {
-
-        Dataverse d;
-        JsonObject dvJson;
+        Dataverse newDataverse;
+        JsonObject newDataverseJson;
         try {
-            dvJson = JsonUtil.getJsonObject(body);
-            d = jsonParser().parseDataverse(dvJson);
+            newDataverseJson = JsonUtil.getJsonObject(body);
+            newDataverse = jsonParser().parseDataverse(newDataverseJson);
         } catch (JsonParsingException jpe) {
             logger.log(Level.SEVERE, "Json: {0}", body);
-            return error(Status.BAD_REQUEST, "Error parsing Json: " + jpe.getMessage());
+            return error(Status.BAD_REQUEST, MessageFormat.format(BundleUtil.getStringFromBundle("dataverse.create.error.jsonparse"), jpe.getMessage()));
         } catch (JsonParseException ex) {
             logger.log(Level.SEVERE, "Error parsing dataverse from json: " + ex.getMessage(), ex);
-            return error(Response.Status.BAD_REQUEST,
-                    "Error parsing the POSTed json into a dataverse: " + ex.getMessage());
+            return error(Status.BAD_REQUEST, MessageFormat.format(BundleUtil.getStringFromBundle("dataverse.create.error.jsonparsetodataverse"), ex.getMessage()));
         }
 
         try {
+            JsonObject metadataBlocksJson = newDataverseJson.getJsonObject("metadataBlocks");
+            List<DataverseFieldTypeInputLevel> inputLevels = null;
+            List<MetadataBlock> metadataBlocks = null;
+            if (metadataBlocksJson != null) {
+                JsonArray inputLevelsArray = metadataBlocksJson.getJsonArray("inputLevels");
+                inputLevels = inputLevelsArray != null ? parseInputLevels(inputLevelsArray, newDataverse) : null;
+
+                JsonArray metadataBlockNamesArray = metadataBlocksJson.getJsonArray("metadataBlockNames");
+                metadataBlocks = metadataBlockNamesArray != null ? parseNewDataverseMetadataBlocks(metadataBlockNamesArray) : null;
+            }
+
+            JsonArray facetIdsArray = newDataverseJson.getJsonArray("facetIds");
+            List<DatasetFieldType> facetList = facetIdsArray != null ? parseFacets(facetIdsArray) : null;
+
             if (!parentIdtf.isEmpty()) {
                 Dataverse owner = findDataverseOrDie(parentIdtf);
-                d.setOwner(owner);
+                newDataverse.setOwner(owner);
             }
 
             // set the dataverse - contact relationship in the contacts
-            for (DataverseContact dc : d.getDataverseContacts()) {
-                dc.setDataverse(d);
+            for (DataverseContact dc : newDataverse.getDataverseContacts()) {
+                dc.setDataverse(newDataverse);
             }
 
             AuthenticatedUser u = getRequestAuthenticatedUserOrDie(crc);
-            d = execCommand(new CreateDataverseCommand(d, createDataverseRequest(u), null, null));
-            return created("/dataverses/" + d.getAlias(), json(d));
+            newDataverse = execCommand(new CreateDataverseCommand(newDataverse, createDataverseRequest(u), facetList, inputLevels, metadataBlocks));
+            return created("/dataverses/" + newDataverse.getAlias(), json(newDataverse));
         } catch (WrappedResponse ww) {
 
             String error = ConstraintViolationUtil.getErrorStringForConstraintViolations(ww.getCause());
@@ -179,7 +194,21 @@ public Response addDataverse(@Context ContainerRequestContext crc, String body,
 
         }
     }
-    
+
+    private List<MetadataBlock> parseNewDataverseMetadataBlocks(JsonArray metadataBlockNamesArray) throws WrappedResponse {
+        List<MetadataBlock> selectedMetadataBlocks = new ArrayList<>();
+        for (JsonString metadataBlockName : metadataBlockNamesArray.getValuesAs(JsonString.class)) {
+            MetadataBlock metadataBlock = metadataBlockSvc.findByName(metadataBlockName.getString());
+            if (metadataBlock == null) {
+                String errorMessage = MessageFormat.format(BundleUtil.getStringFromBundle("dataverse.metadatablocks.error.invalidmetadatablockname"), metadataBlockName);
+                throw new WrappedResponse(badRequest(errorMessage));
+            }
+            selectedMetadataBlocks.add(metadataBlock);
+        }
+
+        return selectedMetadataBlocks;
+    }
+
     @POST
     @AuthRequired
     @Path("{identifier}/validateDatasetJson")
@@ -637,13 +666,26 @@ public Response updateAttribute(@Context ContainerRequestContext crc, @PathParam
         }
     }
 
+    @GET
+    @AuthRequired
+    @Path("{identifier}/inputLevels")
+    public Response getInputLevels(@Context ContainerRequestContext crc, @PathParam("identifier") String identifier) {
+        try {
+            Dataverse dataverse = findDataverseOrDie(identifier);
+            List<DataverseFieldTypeInputLevel> inputLevels = execCommand(new ListDataverseInputLevelsCommand(createDataverseRequest(getRequestUser(crc)), dataverse));
+            return ok(jsonDataverseInputLevels(inputLevels));
+        } catch (WrappedResponse e) {
+            return e.getResponse();
+        }
+    }
+
     @PUT
     @AuthRequired
     @Path("{identifier}/inputLevels")
     public Response updateInputLevels(@Context ContainerRequestContext crc, @PathParam("identifier") String identifier, String jsonBody) {
         try {
             Dataverse dataverse = findDataverseOrDie(identifier);
-            List<DataverseFieldTypeInputLevel> newInputLevels = parseInputLevels(jsonBody, dataverse);
+            List<DataverseFieldTypeInputLevel> newInputLevels = parseInputLevels(Json.createReader(new StringReader(jsonBody)).readArray(), dataverse);
             execCommand(new UpdateDataverseInputLevelsCommand(dataverse, createDataverseRequest(getRequestUser(crc)), newInputLevels));
             return ok(BundleUtil.getStringFromBundle("dataverse.update.success"), JsonPrinter.json(dataverse));
         } catch (WrappedResponse e) {
@@ -651,9 +693,7 @@ public Response updateInputLevels(@Context ContainerRequestContext crc, @PathPar
         }
     }
 
-    private List<DataverseFieldTypeInputLevel> parseInputLevels(String jsonBody, Dataverse dataverse) throws WrappedResponse {
-        JsonArray inputLevelsArray = Json.createReader(new StringReader(jsonBody)).readArray();
-
+    private List<DataverseFieldTypeInputLevel> parseInputLevels(JsonArray inputLevelsArray, Dataverse dataverse) throws WrappedResponse {
         List<DataverseFieldTypeInputLevel> newInputLevels = new ArrayList<>();
         for (JsonValue value : inputLevelsArray) {
             JsonObject inputLevel = (JsonObject) value;
@@ -661,19 +701,38 @@ private List<DataverseFieldTypeInputLevel> parseInputLevels(String jsonBody, Dat
             DatasetFieldType datasetFieldType = datasetFieldSvc.findByName(datasetFieldTypeName);
 
             if (datasetFieldType == null) {
-                String errorMessage = MessageFormat.format(BundleUtil.getStringFromBundle("dataverse.updateinputlevels.error.invalidfieldtypename"), datasetFieldTypeName);
+                String errorMessage = MessageFormat.format(BundleUtil.getStringFromBundle("dataverse.inputlevels.error.invalidfieldtypename"), datasetFieldTypeName);
                 throw new WrappedResponse(badRequest(errorMessage));
             }
 
             boolean required = inputLevel.getBoolean("required");
             boolean include = inputLevel.getBoolean("include");
 
+            if (required && !include) {
+                String errorMessage = MessageFormat.format(BundleUtil.getStringFromBundle("dataverse.inputlevels.error.cannotberequiredifnotincluded"), datasetFieldTypeName);
+                throw new WrappedResponse(badRequest(errorMessage));
+            }
+
             newInputLevels.add(new DataverseFieldTypeInputLevel(datasetFieldType, dataverse, required, include));
         }
 
         return newInputLevels;
     }
 
+    private List<DatasetFieldType> parseFacets(JsonArray facetsArray) throws WrappedResponse {
+        List<DatasetFieldType> facets = new LinkedList<>();
+        for (JsonString facetId : facetsArray.getValuesAs(JsonString.class)) {
+            DatasetFieldType dsfType = findDatasetFieldType(facetId.getString());
+            if (dsfType == null) {
+                throw new WrappedResponse(badRequest(MessageFormat.format(BundleUtil.getStringFromBundle("dataverse.facets.error.fieldtypenotfound"), facetId)));
+            } else if (!dsfType.isFacetable()) {
+                throw new WrappedResponse(badRequest(MessageFormat.format(BundleUtil.getStringFromBundle("dataverse.facets.error.fieldtypenotfacetable"), facetId)));
+            }
+            facets.add(dsfType);
+        }
+        return facets;
+    }
+
     @DELETE
     @AuthRequired
     @Path("{linkingDataverseId}/deleteLink/{linkedDataverseId}")
@@ -790,22 +849,29 @@ public Response setMetadataRoot(@Context ContainerRequestContext crc, @PathParam
     /**
      * return list of facets for the dataverse with alias `dvIdtf`
      */
-    public Response listFacets(@Context ContainerRequestContext crc, @PathParam("identifier") String dvIdtf) {
+    public Response listFacets(@Context ContainerRequestContext crc,
+                               @PathParam("identifier") String dvIdtf,
+                               @QueryParam("returnDetails") boolean returnDetails) {
         try {
-            User u = getRequestUser(crc);
-            DataverseRequest r = createDataverseRequest(u);
+            User user = getRequestUser(crc);
+            DataverseRequest request = createDataverseRequest(user);
             Dataverse dataverse = findDataverseOrDie(dvIdtf);
-            JsonArrayBuilder fs = Json.createArrayBuilder();
-            for (DataverseFacet f : execCommand(new ListFacetsCommand(r, dataverse))) {
-                fs.add(f.getDatasetFieldType().getName());
+            List<DataverseFacet> dataverseFacets = execCommand(new ListFacetsCommand(request, dataverse));
+
+            if (returnDetails) {
+                return ok(jsonDataverseFacets(dataverseFacets));
+            } else {
+                JsonArrayBuilder facetsBuilder = Json.createArrayBuilder();
+                for (DataverseFacet facet : dataverseFacets) {
+                    facetsBuilder.add(facet.getDatasetFieldType().getName());
+                }
+                return ok(facetsBuilder);
             }
-            return ok(fs);
         } catch (WrappedResponse e) {
             return e.getResponse();
         }
     }
 
-
     @GET
     @AuthRequired
     @Path("{identifier}/featured")
@@ -922,16 +988,12 @@ public Response deleteFeaturedCollections(@Context ContainerRequestContext crc,
      * (judging by the UI). This triggers a 500 when '-d @foo.json' is used.
      */
     public Response setFacets(@Context ContainerRequestContext crc, @PathParam("identifier") String dvIdtf, String facetIds) {
-
-        List<DatasetFieldType> facets = new LinkedList<>();
-        for (JsonString facetId : Util.asJsonArray(facetIds).getValuesAs(JsonString.class)) {
-            DatasetFieldType dsfType = findDatasetFieldType(facetId.getString());
-            if (dsfType == null) {
-                return error(Response.Status.BAD_REQUEST, "Can't find dataset field type '" + facetId + "'");
-            } else if (!dsfType.isFacetable()) {
-                return error(Response.Status.BAD_REQUEST, "Dataset field type '" + facetId + "' is not facetable");
-            }
-            facets.add(dsfType);
+        JsonArray jsonArray = Util.asJsonArray(facetIds);
+        List<DatasetFieldType> facets;
+        try {
+            facets = parseFacets(jsonArray);
+        } catch (WrappedResponse e) {
+            return e.getResponse();
         }
 
         try {
@@ -1595,4 +1657,25 @@ public Response linkDataverse(@Context ContainerRequestContext crc, @PathParam("
         }
     }
 
+    @GET
+    @AuthRequired
+    @Path("{identifier}/userPermissions")
+    public Response getUserPermissionsOnDataverse(@Context ContainerRequestContext crc, @PathParam("identifier") String dvIdtf) {
+        Dataverse dataverse;
+        try {
+            dataverse = findDataverseOrDie(dvIdtf);
+        } catch (WrappedResponse wr) {
+            return wr.getResponse();
+        }
+        User requestUser = getRequestUser(crc);
+        JsonObjectBuilder jsonObjectBuilder = Json.createObjectBuilder();
+        jsonObjectBuilder.add("canAddDataverse", permissionService.userOn(requestUser, dataverse).has(Permission.AddDataverse));
+        jsonObjectBuilder.add("canAddDataset", permissionService.userOn(requestUser, dataverse).has(Permission.AddDataset));
+        jsonObjectBuilder.add("canViewUnpublishedDataverse", permissionService.userOn(requestUser, dataverse).has(Permission.ViewUnpublishedDataverse));
+        jsonObjectBuilder.add("canEditDataverse", permissionService.userOn(requestUser, dataverse).has(Permission.EditDataverse));
+        jsonObjectBuilder.add("canManageDataversePermissions", permissionService.userOn(requestUser, dataverse).has(Permission.ManageDataversePermissions));
+        jsonObjectBuilder.add("canPublishDataverse", permissionService.userOn(requestUser, dataverse).has(Permission.PublishDataverse));
+        jsonObjectBuilder.add("canDeleteDataverse", permissionService.userOn(requestUser, dataverse).has(Permission.DeleteDataverse));
+        return ok(jsonObjectBuilder);
+    }
 }
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/SavedSearches.java b/src/main/java/edu/harvard/iq/dataverse/api/SavedSearches.java
index 5d0365d022e..33a11a2df23 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/SavedSearches.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/SavedSearches.java
@@ -55,7 +55,7 @@ public Response list() {
             savedSearchesBuilder.add(thisSavedSearch);
         }
         JsonObjectBuilder response = Json.createObjectBuilder();
-        response.add("saved searches", savedSearchesBuilder);
+        response.add("savedSearches", savedSearchesBuilder);
         return ok(response);
     }
 
@@ -90,7 +90,6 @@ private JsonObjectBuilder toJson(SavedSearch savedSearch) {
 
     @POST
     public Response add(JsonObject body) {
-
         if (body == null) {
             return error(BAD_REQUEST, "JSON is expected.");
         }
@@ -159,7 +158,7 @@ public Response add(JsonObject body) {
 
         try {
             SavedSearch persistedSavedSearch = savedSearchSvc.add(toPersist);
-            return ok("Added: " + persistedSavedSearch);
+            return ok("Added: " + persistedSavedSearch, Json.createObjectBuilder().add("id", persistedSavedSearch.getId()));
         } catch (EJBException ex) {
             StringBuilder errors = new StringBuilder();
             Throwable throwable = ex.getCause();
@@ -173,16 +172,18 @@ public Response add(JsonObject body) {
 
     @DELETE
     @Path("{id}")
-    public Response delete(@PathParam("id") long doomedId) {
-        boolean disabled = true;
-        if (disabled) {
-            return error(BAD_REQUEST, "Saved Searches can not safely be deleted because links can not safely be deleted. See https://github.com/IQSS/dataverse/issues/1364 for details.");
-        }
+    public Response delete(@PathParam("id") long doomedId, @QueryParam("unlink") boolean unlink) {
         SavedSearch doomed = savedSearchSvc.find(doomedId);
         if (doomed == null) {
             return error(NOT_FOUND, "Could not find saved search id " + doomedId);
         }
-        boolean wasDeleted = savedSearchSvc.delete(doomedId);
+        boolean wasDeleted;
+        try {
+            wasDeleted = savedSearchSvc.delete(doomedId, unlink);
+        } catch (Exception e) {
+            return error(INTERNAL_SERVER_ERROR, "Problem while trying to unlink links of saved search id " + doomedId);
+        }
+
         if (wasDeleted) {
             return ok(Json.createObjectBuilder().add("Deleted", doomedId));
         } else {
diff --git a/src/main/java/edu/harvard/iq/dataverse/authorization/providers/builtin/PasswordEncryption.java b/src/main/java/edu/harvard/iq/dataverse/authorization/providers/builtin/PasswordEncryption.java
index 4446f68228d..aef8b375b63 100644
--- a/src/main/java/edu/harvard/iq/dataverse/authorization/providers/builtin/PasswordEncryption.java
+++ b/src/main/java/edu/harvard/iq/dataverse/authorization/providers/builtin/PasswordEncryption.java
@@ -1,6 +1,6 @@
 package edu.harvard.iq.dataverse.authorization.providers.builtin;
 
-import java.io.UnsupportedEncodingException;
+import java.nio.charset.StandardCharsets;
 import java.security.MessageDigest;
 import java.security.NoSuchAlgorithmException;
 import org.apache.commons.lang3.RandomStringUtils;
@@ -36,13 +36,13 @@ public interface Algorithm {
         public String encrypt(String plainText) {
             try {
                 MessageDigest md = MessageDigest.getInstance("SHA");
-                md.update(plainText.getBytes("UTF-8"));
+                md.update(plainText.getBytes(StandardCharsets.UTF_8));
                 byte[] raw = md.digest();
                 //String hash = Base64.encodeToString(raw, true);
                 String hash = Base64.getEncoder().encodeToString(raw);
                 return hash;
                 
-            } catch (NoSuchAlgorithmException | UnsupportedEncodingException e) {
+            } catch (NoSuchAlgorithmException e) {
                 throw new RuntimeException(e);
             }
         }
diff --git a/src/main/java/edu/harvard/iq/dataverse/dataaccess/TabularSubsetGenerator.java b/src/main/java/edu/harvard/iq/dataverse/dataaccess/TabularSubsetGenerator.java
index c369010c8cd..a42bb35615f 100644
--- a/src/main/java/edu/harvard/iq/dataverse/dataaccess/TabularSubsetGenerator.java
+++ b/src/main/java/edu/harvard/iq/dataverse/dataaccess/TabularSubsetGenerator.java
@@ -20,30 +20,16 @@
 
 package edu.harvard.iq.dataverse.dataaccess;
 
-import edu.harvard.iq.dataverse.DataFile;
-import edu.harvard.iq.dataverse.datavariable.DataVariable;
-
-import java.io.BufferedInputStream;
-import java.io.BufferedOutputStream;
 import java.io.BufferedWriter;
 import java.io.File;
 import java.io.FileInputStream;
 import java.io.FileNotFoundException;
-import java.io.FileOutputStream;
 import java.io.FileWriter;
 import java.io.IOException;
 import java.io.InputStream;
 import java.util.ArrayList;
 import java.util.List;
 import java.util.Scanner;
-import java.util.Set;
-import java.math.BigDecimal;
-import java.math.MathContext;
-import java.math.RoundingMode;
-import java.nio.ByteBuffer;
-import java.nio.channels.FileChannel;
-import java.nio.file.Paths;
-import java.nio.file.StandardOpenOption;
 import java.util.logging.Logger;
 import java.util.regex.Matcher;
 
diff --git a/src/main/java/edu/harvard/iq/dataverse/dataset/DatasetUtil.java b/src/main/java/edu/harvard/iq/dataverse/dataset/DatasetUtil.java
index 060b8694e9c..4c84384b271 100644
--- a/src/main/java/edu/harvard/iq/dataverse/dataset/DatasetUtil.java
+++ b/src/main/java/edu/harvard/iq/dataverse/dataset/DatasetUtil.java
@@ -17,12 +17,11 @@
 import java.io.ByteArrayInputStream;
 import java.io.File;
 import java.io.FileInputStream;
-import java.io.FileNotFoundException;
 import java.io.FileOutputStream;
 import java.io.IOException;
 import java.io.InputStream;
-import java.io.UnsupportedEncodingException;
 import java.nio.channels.FileChannel;
+import java.nio.charset.StandardCharsets;
 import java.nio.file.Files;
 import java.nio.file.Paths;
 import java.util.*;
@@ -281,7 +280,7 @@ public static Dataset persistDatasetLogoToStorageAndCreateThumbnails(Dataset dat
         try {
             tmpFile = FileUtil.inputStreamToFile(inputStream);
         } catch (IOException ex) {
-        	logger.severe(ex.getMessage());
+        	logger.severe("FileUtil.inputStreamToFile failed for tmpFile: " + ex.getMessage());
         }
 
         StorageIO<Dataset> dataAccess = null;
@@ -307,7 +306,7 @@ public static Dataset persistDatasetLogoToStorageAndCreateThumbnails(Dataset dat
             fullSizeImage = ImageIO.read(tmpFile);
         } catch (IOException ex) {
         	IOUtils.closeQuietly(inputStream);
-            logger.severe(ex.getMessage());
+            logger.severe("ImageIO.read failed for tmpFile: " + ex.getMessage());
             return null;
         }
         if (fullSizeImage == null) {
@@ -318,25 +317,14 @@ public static Dataset persistDatasetLogoToStorageAndCreateThumbnails(Dataset dat
         int width = fullSizeImage.getWidth();
         int height = fullSizeImage.getHeight();
         FileChannel src = null;
-        try {
-            src = new FileInputStream(tmpFile).getChannel();
-        } catch (FileNotFoundException ex) {
-        	IOUtils.closeQuietly(inputStream);
-            logger.severe(ex.getMessage());
-            return null;
-        }
         FileChannel dest = null;
-        try {
-            dest = new FileOutputStream(tmpFile).getChannel();
-        } catch (FileNotFoundException ex) {
-        	IOUtils.closeQuietly(inputStream);
-            logger.severe(ex.getMessage());
-            return null;
-        }
-        try {
+        try (FileInputStream fis = new FileInputStream(tmpFile); FileOutputStream fos = new FileOutputStream(tmpFile)) {
+            src = fis.getChannel();
+            dest = fos.getChannel();
             dest.transferFrom(src, 0, src.size());
         } catch (IOException ex) {
-            logger.severe(ex.getMessage());
+        	IOUtils.closeQuietly(inputStream);
+            logger.severe("Error occurred during transfer using FileChannels: " + ex.getMessage());
             return null;
         }
         File tmpFileForResize = null;
@@ -344,7 +332,7 @@ public static Dataset persistDatasetLogoToStorageAndCreateThumbnails(Dataset dat
         	//The stream was used around line 274 above, so this creates an empty file (OK since all it is used for is getting a path, but not reusing it here would make it easier to close it above.)
             tmpFileForResize = FileUtil.inputStreamToFile(inputStream);
         } catch (IOException ex) {
-            logger.severe(ex.getMessage());
+            logger.severe("FileUtil.inputStreamToFile failed for tmpFileForResize: " + ex.getMessage());
             return null;
         } finally {
         	IOUtils.closeQuietly(inputStream);
@@ -409,14 +397,8 @@ public static InputStream getThumbnailAsInputStream(Dataset dataset, int size) {
             String base64Image = datasetThumbnail.getBase64image();
             String leadingStringToRemove = FileUtil.DATA_URI_SCHEME;
             String encodedImg = base64Image.substring(leadingStringToRemove.length());
-            byte[] decodedImg = null;
-            try {
-                decodedImg = Base64.getDecoder().decode(encodedImg.getBytes("UTF-8"));
-                logger.fine("returning this many bytes for  " + "dataset id: " + dataset.getId() + ", persistentId: " + dataset.getIdentifier() + " :" + decodedImg.length);
-            } catch (UnsupportedEncodingException ex) {
-                logger.info("dataset thumbnail could not be decoded for dataset id " + dataset.getId() + ": " + ex);
-                return null;
-            }
+            byte[] decodedImg = Base64.getDecoder().decode(encodedImg.getBytes(StandardCharsets.UTF_8));
+            logger.fine("returning this many bytes for  " + "dataset id: " + dataset.getId() + ", persistentId: " + dataset.getIdentifier() + " :" + decodedImg.length);
             ByteArrayInputStream nonDefaultDatasetThumbnail = new ByteArrayInputStream(decodedImg);
             logger.fine("For dataset id " + dataset.getId() + " a thumbnail was found and is being returned.");
             return nonDefaultDatasetThumbnail;
@@ -627,7 +609,7 @@ public static boolean validateDatasetMetadataExternally(Dataset ds, String execu
         
         try {
             File tempFile = File.createTempFile("datasetMetadataCheck", ".tmp");
-            FileUtils.writeStringToFile(tempFile, jsonMetadata);
+            FileUtils.writeStringToFile(tempFile, jsonMetadata, StandardCharsets.UTF_8);
             
             // run the external executable: 
             String[] params = { executable, tempFile.getAbsolutePath() };
diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/AbstractDatasetCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/AbstractDatasetCommand.java
index 1a1f4f9318b..bd38245d334 100644
--- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/AbstractDatasetCommand.java
+++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/AbstractDatasetCommand.java
@@ -1,5 +1,6 @@
 package edu.harvard.iq.dataverse.engine.command.impl;
 
+import edu.harvard.iq.dataverse.DataFile;
 import edu.harvard.iq.dataverse.Dataset;
 import edu.harvard.iq.dataverse.DatasetField;
 import edu.harvard.iq.dataverse.DatasetFieldServiceBean;
@@ -18,9 +19,11 @@
 import edu.harvard.iq.dataverse.engine.command.exception.IllegalCommandException;
 import edu.harvard.iq.dataverse.pidproviders.PidProvider;
 import edu.harvard.iq.dataverse.pidproviders.PidUtil;
+import edu.harvard.iq.dataverse.pidproviders.doi.fake.FakeDOIProvider;
 import edu.harvard.iq.dataverse.util.BundleUtil;
 
 import java.sql.Timestamp;
+import java.util.Arrays;
 import java.util.Date;
 import java.util.Set;
 import java.util.logging.Level;
@@ -169,13 +172,12 @@ protected void registerExternalIdentifier(Dataset theDataset, CommandContext ctx
                             } while (pidProvider.alreadyRegistered(theDataset) && attempts <= FOOLPROOF_RETRIAL_ATTEMPTS_LIMIT);
                         }
                         if(!retry) {
-                            logger.warning("Reserving PID for: "  + getDataset().getId() + " during publication failed.");
-                            throw new IllegalCommandException(BundleUtil.getStringFromBundle("publishDatasetCommand.pidNotReserved"), this);
+                            logger.warning("Reserving PID for: "  + getDataset().getId() + " failed.");
+                            throw new CommandExecutionException(BundleUtil.getStringFromBundle("abstractDatasetCommand.pidNotReserved", Arrays.asList(theDataset.getIdentifier())), this);
                         }
                         if(attempts > FOOLPROOF_RETRIAL_ATTEMPTS_LIMIT) {
                             //Didn't work - we existed the loop with too many tries
-                            throw new CommandExecutionException("This dataset may not be published because its identifier is already in use by another dataset; "
-                                + "gave up after " + attempts + " attempts. Current (last requested) identifier: " + theDataset.getIdentifier(), this);
+                            throw new CommandExecutionException(BundleUtil.getStringFromBundle("abstractDatasetCommand.pidReservationRetryExceeded", Arrays.asList(Integer.toString(attempts), theDataset.getIdentifier())), this);
                         }
                     }
                     // Invariant: Dataset identifier does not exist in the remote registry
@@ -188,6 +190,9 @@ protected void registerExternalIdentifier(Dataset theDataset, CommandContext ctx
                     }
 
                 } catch (Throwable e) {
+                    if (e instanceof CommandException) {
+                        throw (CommandException) e;
+                    }
                     throw new CommandException(BundleUtil.getStringFromBundle("dataset.publish.error", pidProvider.getProviderInformation()), this);
                 }
             } else {
@@ -217,6 +222,73 @@ protected Timestamp getTimestamp() {
         return timestamp;
     }
 
+    protected void registerFilePidsIfNeeded(Dataset theDataset, CommandContext ctxt, boolean b) throws CommandException {
+        // Register file PIDs if needed
+        PidProvider pidGenerator = ctxt.dvObjects().getEffectivePidGenerator(getDataset());
+        boolean shouldRegister = !pidGenerator.registerWhenPublished() &&
+                ctxt.systemConfig().isFilePIDsEnabledForCollection(getDataset().getOwner()) &&
+                pidGenerator.canCreatePidsLike(getDataset().getGlobalId());
+        if (shouldRegister) {
+            for (DataFile dataFile : theDataset.getFiles()) {
+                logger.fine(dataFile.getId() + " is registered?: " + dataFile.isIdentifierRegistered());
+                if (!dataFile.isIdentifierRegistered()) {
+                    // pre-register a persistent id
+                    registerFileExternalIdentifier(dataFile, pidGenerator, ctxt, true);
+                }
+            }
+        }
+    }
+
+    private void registerFileExternalIdentifier(DataFile dataFile, PidProvider pidProvider, CommandContext ctxt, boolean retry) throws CommandException {
+
+        if (!dataFile.isIdentifierRegistered()) {
+
+            if (pidProvider instanceof FakeDOIProvider) {
+                retry = false; // No reason to allow a retry with the FakeProvider (even if it allows
+                               // pre-registration someday), so set false for efficiency
+            }
+            try {
+                if (pidProvider.alreadyRegistered(dataFile)) {
+                    int attempts = 0;
+                    if (retry) {
+                        do {
+                            pidProvider.generatePid(dataFile);
+                            logger.log(Level.INFO, "Attempting to register external identifier for datafile {0} (trying: {1}).",
+                                    new Object[] { dataFile.getId(), dataFile.getIdentifier() });
+                            attempts++;
+                        } while (pidProvider.alreadyRegistered(dataFile) && attempts <= FOOLPROOF_RETRIAL_ATTEMPTS_LIMIT);
+                    }
+                    if (!retry) {
+                        logger.warning("Reserving File PID for: " + getDataset().getId() + ", fileId: " + dataFile.getId() + ", during publication failed.");
+                        throw new CommandExecutionException(BundleUtil.getStringFromBundle("abstractDatasetCommand.filePidNotReserved", Arrays.asList(getDataset().getIdentifier())), this);
+                    }
+                    if (attempts > FOOLPROOF_RETRIAL_ATTEMPTS_LIMIT) {
+                        // Didn't work - we exited the loop with too many tries
+                        throw new CommandExecutionException("This dataset may not be published because its identifier is already in use by another dataset; "
+                                + "gave up after " + attempts + " attempts. Current (last requested) identifier: " + dataFile.getIdentifier(), this);
+                    }
+                }
+                // Invariant: DataFile identifier does not exist in the remote registry
+                try {
+                    pidProvider.createIdentifier(dataFile);
+                    dataFile.setGlobalIdCreateTime(getTimestamp());
+                    dataFile.setIdentifierRegistered(true);
+                } catch (Throwable ex) {
+                    logger.info("Call to globalIdServiceBean.createIdentifier failed: " + ex);
+                }
+
+            } catch (Throwable e) {
+                if (e instanceof CommandException) {
+                    throw (CommandException) e;
+                }
+                throw new CommandException(BundleUtil.getStringFromBundle("file.register.error", pidProvider.getProviderInformation()), this);
+            }
+        } else {
+            throw new IllegalCommandException("This datafile may not have a PID because its id registry service is not supported.", this);
+        }
+
+    }
+
     protected void checkSystemMetadataKeyIfNeeded(DatasetVersion newVersion, DatasetVersion persistedVersion) throws IllegalCommandException {
         Set<MetadataBlock> changedMDBs = DatasetVersionDifference.getBlocksWithChanges(newVersion, persistedVersion);
         for (MetadataBlock mdb : changedMDBs) {
diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateDataverseCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateDataverseCommand.java
index 3efefe90681..489b36e7cef 100644
--- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateDataverseCommand.java
+++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateDataverseCommand.java
@@ -1,17 +1,11 @@
 package edu.harvard.iq.dataverse.engine.command.impl;
 
-import edu.harvard.iq.dataverse.DatasetFieldType;
-import edu.harvard.iq.dataverse.Dataverse;
-import edu.harvard.iq.dataverse.DataverseFieldTypeInputLevel;
+import edu.harvard.iq.dataverse.*;
 import edu.harvard.iq.dataverse.authorization.DataverseRole;
-import edu.harvard.iq.dataverse.RoleAssignment;
 import edu.harvard.iq.dataverse.authorization.Permission;
 import edu.harvard.iq.dataverse.authorization.groups.Group;
-import edu.harvard.iq.dataverse.authorization.groups.GroupProvider;
-import edu.harvard.iq.dataverse.authorization.groups.impl.explicit.ExplicitGroupProvider;
 import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser;
 import edu.harvard.iq.dataverse.authorization.users.User;
-import edu.harvard.iq.dataverse.batch.util.LoggingUtil;
 import edu.harvard.iq.dataverse.engine.command.AbstractCommand;
 import edu.harvard.iq.dataverse.engine.command.CommandContext;
 import edu.harvard.iq.dataverse.engine.command.DataverseRequest;
@@ -19,15 +13,12 @@
 import edu.harvard.iq.dataverse.engine.command.exception.CommandException;
 import edu.harvard.iq.dataverse.engine.command.exception.IllegalCommandException;
 import edu.harvard.iq.dataverse.settings.SettingsServiceBean;
-import java.io.IOException;
 
 import java.sql.Timestamp;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Date;
 import java.util.List;
-import java.util.logging.Logger;
-import org.apache.solr.client.solrj.SolrServerException;
 
 /**
  * TODO make override the date and user more active, so prevent code errors.
@@ -38,14 +29,23 @@
 @RequiredPermissions(Permission.AddDataverse)
 public class CreateDataverseCommand extends AbstractCommand<Dataverse> {
 
-    private static final Logger logger = Logger.getLogger(CreateDataverseCommand.class.getName());
-
     private final Dataverse created;
     private final List<DataverseFieldTypeInputLevel> inputLevelList;
     private final List<DatasetFieldType> facetList;
+    private final List<MetadataBlock> metadataBlocks;
+
+    public CreateDataverseCommand(Dataverse created,
+                                  DataverseRequest aRequest,
+                                  List<DatasetFieldType> facetList,
+                                  List<DataverseFieldTypeInputLevel> inputLevelList) {
+        this(created, aRequest, facetList, inputLevelList, null);
+    }
 
-    public CreateDataverseCommand(Dataverse created, DataverseRequest aRequest, List<DatasetFieldType> facetList,
-            List<DataverseFieldTypeInputLevel> inputLevelList) {
+    public CreateDataverseCommand(Dataverse created,
+                                  DataverseRequest aRequest,
+                                  List<DatasetFieldType> facetList,
+                                  List<DataverseFieldTypeInputLevel> inputLevelList,
+                                  List<MetadataBlock> metadataBlocks) {
         super(aRequest, created.getOwner());
         this.created = created;
         if (facetList != null) {
@@ -58,6 +58,11 @@ public CreateDataverseCommand(Dataverse created, DataverseRequest aRequest, List
         } else {
             this.inputLevelList = null;
         }
+        if (metadataBlocks != null) {
+            this.metadataBlocks = new ArrayList<>(metadataBlocks);
+        } else {
+            this.metadataBlocks = null;
+        }
     }
 
     @Override
@@ -70,6 +75,11 @@ public Dataverse execute(CommandContext ctxt) throws CommandException {
             }
         }
 
+        if (metadataBlocks != null && !metadataBlocks.isEmpty()) {
+            created.setMetadataBlockRoot(true);
+            created.setMetadataBlocks(metadataBlocks);
+        }
+
         if (created.getCreateDate() == null) {
             created.setCreateDate(new Timestamp(new Date().getTime()));
         }
@@ -97,8 +107,8 @@ public Dataverse execute(CommandContext ctxt) throws CommandException {
         if (ctxt.dataverses().findByAlias(created.getAlias()) != null) {
             throw new IllegalCommandException("A dataverse with alias " + created.getAlias() + " already exists", this);
         }
-        
-        if(created.getFilePIDsEnabled()!=null && !ctxt.settings().isTrueForKey(SettingsServiceBean.Key.AllowEnablingFilePIDsPerCollection, false)) {
+
+        if (created.getFilePIDsEnabled() != null && !ctxt.settings().isTrueForKey(SettingsServiceBean.Key.AllowEnablingFilePIDsPerCollection, false)) {
             throw new IllegalCommandException("File PIDs cannot be enabled per collection", this);
         }
 
@@ -109,7 +119,7 @@ public Dataverse execute(CommandContext ctxt) throws CommandException {
         DataverseRole adminRole = ctxt.roles().findBuiltinRoleByAlias(DataverseRole.ADMIN);
         String privateUrlToken = null;
 
-        ctxt.roles().save(new RoleAssignment(adminRole, getRequest().getUser(), managedDv, privateUrlToken),false);
+        ctxt.roles().save(new RoleAssignment(adminRole, getRequest().getUser(), managedDv, privateUrlToken), false);
         // Add additional role assignments if inheritance is set
         boolean inheritAllRoles = false;
         String rolesString = ctxt.settings().getValueForKey(SettingsServiceBean.Key.InheritParentRoleAssignments, "");
@@ -129,18 +139,18 @@ public Dataverse execute(CommandContext ctxt) throws CommandException {
                         // above...
                         if ((inheritAllRoles || rolesToInherit.contains(role.getRole().getAlias()))
                                 && !(role.getAssigneeIdentifier().equals(getRequest().getUser().getIdentifier())
-                                        && role.getRole().equals(adminRole))) {
+                                && role.getRole().equals(adminRole))) {
                             String identifier = role.getAssigneeIdentifier();
                             if (identifier.startsWith(AuthenticatedUser.IDENTIFIER_PREFIX)) {
                                 identifier = identifier.substring(AuthenticatedUser.IDENTIFIER_PREFIX.length());
                                 ctxt.roles().save(new RoleAssignment(role.getRole(),
-                                        ctxt.authentication().getAuthenticatedUser(identifier), managedDv, privateUrlToken),false);
+                                        ctxt.authentication().getAuthenticatedUser(identifier), managedDv, privateUrlToken), false);
                             } else if (identifier.startsWith(Group.IDENTIFIER_PREFIX)) {
                                 identifier = identifier.substring(Group.IDENTIFIER_PREFIX.length());
                                 Group roleGroup = ctxt.groups().getGroup(identifier);
                                 if (roleGroup != null) {
                                     ctxt.roles().save(new RoleAssignment(role.getRole(),
-                                            roleGroup, managedDv, privateUrlToken),false);
+                                            roleGroup, managedDv, privateUrlToken), false);
                                 }
                             }
                         }
@@ -150,12 +160,14 @@ public Dataverse execute(CommandContext ctxt) throws CommandException {
         }
 
         managedDv.setPermissionModificationTime(new Timestamp(new Date().getTime()));
-        // TODO: save is called here and above; we likely don't need both
-        managedDv = ctxt.dataverses().save(managedDv);
 
-  //      ctxt.index().indexDataverse(managedDv);
         if (facetList != null) {
             ctxt.facets().deleteFacetsFor(managedDv);
+
+            if (!facetList.isEmpty()) {
+                managedDv.setFacetRoot(true);
+            }
+
             int i = 0;
             for (DatasetFieldType df : facetList) {
                 ctxt.facets().create(i++, df, managedDv);
@@ -163,17 +175,23 @@ public Dataverse execute(CommandContext ctxt) throws CommandException {
         }
 
         if (inputLevelList != null) {
+            if (!inputLevelList.isEmpty()) {
+                managedDv.addInputLevelsMetadataBlocksIfNotPresent(inputLevelList);
+            }
             ctxt.fieldTypeInputLevels().deleteFacetsFor(managedDv);
-            for (DataverseFieldTypeInputLevel obj : inputLevelList) {
-                obj.setDataverse(managedDv);
-                ctxt.fieldTypeInputLevels().create(obj);
+            for (DataverseFieldTypeInputLevel inputLevel : inputLevelList) {
+                inputLevel.setDataverse(managedDv);
+                ctxt.fieldTypeInputLevels().create(inputLevel);
             }
         }
+
+        // TODO: save is called here and above; we likely don't need both
+        managedDv = ctxt.dataverses().save(managedDv);
         return managedDv;
     }
-    
+
     @Override
-    public boolean onSuccess(CommandContext ctxt, Object r) {  
+    public boolean onSuccess(CommandContext ctxt, Object r) {
         return ctxt.dataverses().index((Dataverse) r);
     }
 
diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CuratePublishedDatasetVersionCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CuratePublishedDatasetVersionCommand.java
index f83041d87bd..e6e8279a314 100644
--- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CuratePublishedDatasetVersionCommand.java
+++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CuratePublishedDatasetVersionCommand.java
@@ -1,7 +1,6 @@
 package edu.harvard.iq.dataverse.engine.command.impl;
 
 import edu.harvard.iq.dataverse.authorization.Permission;
-import edu.harvard.iq.dataverse.datavariable.VarGroup;
 import edu.harvard.iq.dataverse.engine.command.CommandContext;
 import edu.harvard.iq.dataverse.engine.command.DataverseRequest;
 import edu.harvard.iq.dataverse.engine.command.RequiredPermissions;
@@ -13,14 +12,17 @@
 import edu.harvard.iq.dataverse.util.DatasetFieldUtil;
 import edu.harvard.iq.dataverse.workflows.WorkflowComment;
 import edu.harvard.iq.dataverse.Dataset;
+import edu.harvard.iq.dataverse.DatasetField;
 import edu.harvard.iq.dataverse.DatasetVersion;
 import edu.harvard.iq.dataverse.TermsOfUseAndAccess;
 import edu.harvard.iq.dataverse.DataFile;
 import edu.harvard.iq.dataverse.FileMetadata;
+import edu.harvard.iq.dataverse.RoleAssignment;
 import edu.harvard.iq.dataverse.DataFileCategory;
 import edu.harvard.iq.dataverse.DatasetVersionDifference;
 
-import java.util.Collection;
+import java.util.ArrayList;
+import java.util.Iterator;
 import java.util.List;
 import java.util.logging.Level;
 import java.util.logging.Logger;
@@ -50,6 +52,9 @@ public Dataset execute(CommandContext ctxt) throws CommandException {
         if (!getUser().isSuperuser()) {
             throw new IllegalCommandException("Only superusers can curate published dataset versions", this);
         }
+        Dataset savedDataset = null;
+        // Merge the dataset into our JPA context
+        setDataset(ctxt.em().merge(getDataset()));
 
         ctxt.permissions().checkEditDatasetLock(getDataset(), getRequest(), this);
         // Invariant: Dataset has no locks preventing the update
@@ -58,23 +63,23 @@ public Dataset execute(CommandContext ctxt) throws CommandException {
         DatasetVersion newVersion = getDataset().getOrCreateEditVersion();
         // Copy metadata from draft version to latest published version
         updateVersion.setDatasetFields(newVersion.initDatasetFields());
-
-        
+        newVersion.setDatasetFields(new ArrayList<DatasetField>());
 
         // final DatasetVersion editVersion = getDataset().getEditVersion();
         DatasetFieldUtil.tidyUpFields(updateVersion.getDatasetFields(), true);
 
-        // Merge the new version into our JPA context
-        ctxt.em().merge(updateVersion);
-
         TermsOfUseAndAccess oldTerms = updateVersion.getTermsOfUseAndAccess();
         TermsOfUseAndAccess newTerms = newVersion.getTermsOfUseAndAccess();
         newTerms.setDatasetVersion(updateVersion);
         updateVersion.setTermsOfUseAndAccess(newTerms);
-        //Put old terms on version that will be deleted....
-        newVersion.setTermsOfUseAndAccess(oldTerms);
-        
-        //Validate metadata and TofA conditions
+        // Clear unnecessary terms relationships ....
+        newVersion.setTermsOfUseAndAccess(null);
+        oldTerms.setDatasetVersion(null);
+        // Without this there's a db exception related to the oldTerms being referenced
+        // by the datasetversion table at the flush around line 212
+        ctxt.em().flush();
+
+        // Validate metadata and TofA conditions
         validateOrDie(updateVersion, isValidateLenient());
         
         //Also set the fileaccessrequest boolean on the dataset to match the new terms
@@ -87,19 +92,20 @@ public Dataset execute(CommandContext ctxt) throws CommandException {
             updateVersion.getWorkflowComments().addAll(newComments);
         }
 
-        
         // we have to merge to update the database but not flush because
         // we don't want to create two draft versions!
-        Dataset tempDataset = ctxt.em().merge(getDataset());
-        
+        Dataset tempDataset = getDataset();
         updateVersion = tempDataset.getLatestVersionForCopy();
         
         // Look for file metadata changes and update published metadata if needed
         List<FileMetadata> pubFmds = updateVersion.getFileMetadatas();
         int pubFileCount = pubFmds.size();
         int newFileCount = tempDataset.getOrCreateEditVersion().getFileMetadatas().size();
-        /* The policy for this command is that it should only be used when the change is a 'minor update' with no file changes.
-         * Nominally we could call .isMinorUpdate() for that but we're making the same checks as we go through the update here. 
+        /*
+         * The policy for this command is that it should only be used when the change is
+         * a 'minor update' with no file changes. Nominally we could call
+         * .isMinorUpdate() for that but we're making the same checks as we go through
+         * the update here.
          */
         if (pubFileCount != newFileCount) {
             logger.severe("Draft version of dataset: " + tempDataset.getId() + " has: " + newFileCount + " while last published version has " + pubFileCount);
@@ -108,7 +114,10 @@ public Dataset execute(CommandContext ctxt) throws CommandException {
         Long thumbId = null;
         if(tempDataset.getThumbnailFile()!=null) {
             thumbId = tempDataset.getThumbnailFile().getId();
-        };
+        }
+
+        // Note - Curate allows file metadata changes but not adding/deleting files. If
+        // that ever changes, this command needs to be updated.
         for (FileMetadata publishedFmd : pubFmds) {
             DataFile dataFile = publishedFmd.getDataFile();
             FileMetadata draftFmd = dataFile.getLatestFileMetadata();
@@ -155,45 +164,73 @@ public Dataset execute(CommandContext ctxt) throws CommandException {
         // Update modification time on the published version and the dataset
         updateVersion.setLastUpdateTime(getTimestamp());
         tempDataset.setModificationTime(getTimestamp());
-        ctxt.em().merge(updateVersion);
-        Dataset savedDataset = ctxt.em().merge(tempDataset);
-
-        // Flush before calling DeleteDatasetVersion which calls
-        // PrivateUrlServiceBean.getPrivateUrlFromDatasetId() that will query the DB and
-        // fail if our changes aren't there
-        ctxt.em().flush();
+        newVersion = ctxt.em().merge(newVersion);
+        savedDataset = ctxt.em().merge(tempDataset);
 
         // Now delete draft version
-        DeleteDatasetVersionCommand cmd;
 
-        cmd = new DeleteDatasetVersionCommand(getRequest(), savedDataset);
-        ctxt.engine().submit(cmd);
-        // Running the command above reindexes the dataset, so we don't need to do it
-        // again in here.
+        ctxt.em().remove(newVersion);
+
+        Iterator<DatasetVersion> dvIt = savedDataset.getVersions().iterator();
+        while (dvIt.hasNext()) {
+            DatasetVersion dv = dvIt.next();
+            if (dv.isDraft()) {
+                dvIt.remove();
+                break; // We've removed the draft version, no need to continue iterating
+            }
+        }
+
+        savedDataset = ctxt.em().merge(savedDataset);
+        ctxt.em().flush();
+
+        RoleAssignment ra = ctxt.privateUrl().getPrivateUrlRoleAssignmentFromDataset(savedDataset);
+        if (ra != null) {
+            ctxt.roles().revoke(ra);
+        }
 
         // And update metadata at PID provider
-        ctxt.engine().submit(
-                new UpdateDvObjectPIDMetadataCommand(savedDataset, getRequest()));
-        
-        //And the exported metadata files
         try {
-            ExportService instance = ExportService.getInstance();
-            instance.exportAllFormats(getDataset());
-        } catch (ExportException ex) {
-            // Just like with indexing, a failure to export is not a fatal condition.
-            logger.log(Level.WARNING, "Curate Published DatasetVersion: exception while exporting metadata files:{0}", ex.getMessage());
+            ctxt.engine().submit(
+                    new UpdateDvObjectPIDMetadataCommand(savedDataset, getRequest()));
+        } catch (CommandException ex) {
+            // The try/catch makes this non-fatal. Should it be non-fatal? It's different from what we do in publish.
+            // This can be corrected by running the update PID API later, but who will look in the log?
+            // With the change to not use the DeleteDatasetVersionCommand above and other
+            // fixes, this error may now cleanly restore the initial state
+            // with the draft and last published versions unchanged, but this has not yet been tested.
+            // (Alternatively, this could move to onSuccess if we intend it to stay non-fatal.)
+            logger.log(Level.WARNING, "Curate Published DatasetVersion: exception while updating PID metadata:{0}", ex.getMessage());
         }
-        
-
-        // Update so that getDataset() in updateDatasetUser will get the up-to-date copy
-        // (with no draft version)
+        // Update so that getDataset() in updateDatasetUser() will get the up-to-date
+        // copy (with no draft version)
         setDataset(savedDataset);
+
         updateDatasetUser(ctxt);
         
-
-
+        // ToDo - see if there are other DatasetVersionUser entries unique to the draft
+        // version that should be moved to the last published version
+        // As this command is intended for minor fixes, often done by the person pushing
+        // the update-current-version button, this is probably a minor issue.
 
         return savedDataset;
     }
 
+    @Override
+    public boolean onSuccess(CommandContext ctxt, Object r) {
+        boolean retVal = true;
+        Dataset d = (Dataset) r;
+        
+        ctxt.index().asyncIndexDataset(d, true);
+        
+        // And the exported metadata files
+        try {
+            ExportService instance = ExportService.getInstance();
+            instance.exportAllFormats(d);
+        } catch (ExportException ex) {
+            // Just like with indexing, a failure to export is not a fatal condition.
+            retVal = false;
+            logger.log(Level.WARNING, "Curate Published DatasetVersion: exception while exporting metadata files:{0}", ex.getMessage());
+        }
+        return retVal;
+    }
 }
diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/DeleteDatasetLinkingDataverseCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/DeleteDatasetLinkingDataverseCommand.java
index f21a2782609..7f5672c0cd7 100644
--- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/DeleteDatasetLinkingDataverseCommand.java
+++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/DeleteDatasetLinkingDataverseCommand.java
@@ -8,17 +8,14 @@
 import edu.harvard.iq.dataverse.Dataset;
 import edu.harvard.iq.dataverse.DatasetLinkingDataverse;
 import edu.harvard.iq.dataverse.authorization.Permission;
-import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser;
 import edu.harvard.iq.dataverse.batch.util.LoggingUtil;
 import edu.harvard.iq.dataverse.engine.command.AbstractCommand;
 import edu.harvard.iq.dataverse.engine.command.CommandContext;
 import edu.harvard.iq.dataverse.engine.command.DataverseRequest;
 import edu.harvard.iq.dataverse.engine.command.RequiredPermissions;
 import edu.harvard.iq.dataverse.engine.command.exception.CommandException;
-import edu.harvard.iq.dataverse.engine.command.exception.PermissionException;
 import java.io.IOException;
-import java.util.Collections;
-import java.util.concurrent.Future;
+
 import org.apache.solr.client.solrj.SolrServerException;
 
 /**
@@ -26,7 +23,7 @@
  * @author sarahferry
  */
 
-@RequiredPermissions( Permission.EditDataset )
+@RequiredPermissions( Permission.PublishDataset )
 public class DeleteDatasetLinkingDataverseCommand extends AbstractCommand<Dataset>{
     private final DatasetLinkingDataverse doomed;
     private final Dataset editedDs;
@@ -41,10 +38,6 @@ public DeleteDatasetLinkingDataverseCommand(DataverseRequest aRequest, Dataset e
     
     @Override
     public Dataset execute(CommandContext ctxt) throws CommandException {
-        if ((!(getUser() instanceof AuthenticatedUser) || !getUser().isSuperuser())) {
-            throw new PermissionException("Delete dataset linking dataverse can only be called by superusers.",
-                    this, Collections.singleton(Permission.EditDataset), editedDs);
-        }
         Dataset merged = ctxt.em().merge(editedDs);
         DatasetLinkingDataverse doomedAndMerged = ctxt.em().merge(doomed);
         ctxt.em().remove(doomedAndMerged);
diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/DuraCloudSubmitToArchiveCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/DuraCloudSubmitToArchiveCommand.java
index d6d7b49d172..94f983f0c13 100644
--- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/DuraCloudSubmitToArchiveCommand.java
+++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/DuraCloudSubmitToArchiveCommand.java
@@ -5,7 +5,6 @@
 import edu.harvard.iq.dataverse.DatasetLock.Reason;
 import edu.harvard.iq.dataverse.authorization.Permission;
 import edu.harvard.iq.dataverse.authorization.users.ApiToken;
-import edu.harvard.iq.dataverse.engine.command.Command;
 import edu.harvard.iq.dataverse.engine.command.DataverseRequest;
 import edu.harvard.iq.dataverse.engine.command.RequiredPermissions;
 import edu.harvard.iq.dataverse.workflow.step.Failure;
@@ -14,7 +13,7 @@
 import java.io.IOException;
 import java.io.PipedInputStream;
 import java.io.PipedOutputStream;
-import java.nio.charset.Charset;
+import java.nio.charset.StandardCharsets;
 import java.security.DigestInputStream;
 import java.security.MessageDigest;
 import java.security.NoSuchAlgorithmException;
@@ -32,7 +31,7 @@
 import org.duracloud.error.ContentStoreException;
 
 @RequiredPermissions(Permission.PublishDataset)
-public class DuraCloudSubmitToArchiveCommand extends AbstractSubmitToArchiveCommand implements Command<DatasetVersion> {
+public class DuraCloudSubmitToArchiveCommand extends AbstractSubmitToArchiveCommand {
 
     private static final Logger logger = Logger.getLogger(DuraCloudSubmitToArchiveCommand.class.getName());
     private static final String DEFAULT_PORT = "443";
@@ -117,7 +116,7 @@ public WorkflowStepResult performArchiveSubmission(DatasetVersion dv, ApiToken t
                             public void run() {
                                 try (PipedOutputStream dataciteOut = new PipedOutputStream(dataciteIn)) {
 
-                                    dataciteOut.write(dataciteXml.getBytes(Charset.forName("utf-8")));
+                                    dataciteOut.write(dataciteXml.getBytes(StandardCharsets.UTF_8));
                                     dataciteOut.close();
                                     success=true;
                                 } catch (Exception e) {
diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/FinalizeDatasetPublicationCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/FinalizeDatasetPublicationCommand.java
index 287e877f6e0..69ebe6feed8 100644
--- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/FinalizeDatasetPublicationCommand.java
+++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/FinalizeDatasetPublicationCommand.java
@@ -102,13 +102,13 @@ public Dataset execute(CommandContext ctxt) throws CommandException {
             try {
                 // This can potentially throw a CommandException, so let's make 
                 // sure we exit cleanly:
-
-            	registerExternalIdentifier(theDataset, ctxt, false);
+                registerExternalIdentifier(theDataset, ctxt, false);
+                registerFilePidsIfNeeded(theDataset, ctxt, false);
             } catch (CommandException comEx) {
-                logger.warning("Failed to reserve the identifier "+theDataset.getGlobalId().asString()+"; notifying the user(s), unlocking the dataset");
-                // Send failure notification to the user: 
+                logger.warning("Failed to reserve the identifier " + theDataset.getGlobalId().asString() + "; notifying the user(s), unlocking the dataset");
+                // Send failure notification to the user:
                 notifyUsersDatasetPublishStatus(ctxt, theDataset, UserNotification.Type.PUBLISHFAILED_PIDREG);
-                // Remove the dataset lock: 
+                // Remove the dataset lock:
                 ctxt.datasets().removeDatasetLocks(theDataset, DatasetLock.Reason.finalizePublication);
                 // re-throw the exception:
                 throw comEx;
@@ -395,8 +395,7 @@ private void publicizeExternalIdentifier(Dataset dataset, CommandContext ctxt) t
             // we can't get "dependent" DOIs assigned to files in a dataset
             // with the registered id that is a handle; or even a DOI, but in
             // an authority that's different from what's currently configured.
-            // Additionaly in 4.9.3 we have added a system variable to disable
-            // registering file PIDs on the installation level.
+            // File PIDs may be enabled/disabled per collection.
             boolean registerGlobalIdsForFiles = ctxt.systemConfig().isFilePIDsEnabledForCollection(
                     getDataset().getOwner()) 
                     && pidProvider.canCreatePidsLike(dataset.getGlobalId());
@@ -422,8 +421,8 @@ private void publicizeExternalIdentifier(Dataset dataset, CommandContext ctxt) t
                                                        // pidProvider.
             dataset.setIdentifierRegistered(true);
         } catch (Throwable e) {
-            logger.warning("Failed to register the identifier " + dataset.getGlobalId().asString()
-                    + ", or to register a file in the dataset; notifying the user(s), unlocking the dataset");
+            logger.warning("Failed to publicize the identifier " + dataset.getGlobalId().asString()
+                    + ", or to publicize a file in the dataset; notifying the user(s), unlocking the dataset");
 
             // Send failure notification to the user:
             notifyUsersDatasetPublishStatus(ctxt, dataset, UserNotification.Type.PUBLISHFAILED_PIDREG);
@@ -440,8 +439,9 @@ private void updateFiles(Timestamp updateTime, CommandContext ctxt) throws Comma
             if (dataFile.getPublicationDate() == null) {
                 // this is a new, previously unpublished file, so publish by setting date
                 dataFile.setPublicationDate(updateTime);
-                
-                // check if any prexisting roleassignments have file download and send notifications
+
+                // check if any pre-existing role assignments have file download and send
+                // notifications
                 notifyUsersFileDownload(ctxt, dataFile);
             }
             
diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GoogleCloudSubmitToArchiveCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GoogleCloudSubmitToArchiveCommand.java
index 512987866d4..7d749262b87 100644
--- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GoogleCloudSubmitToArchiveCommand.java
+++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GoogleCloudSubmitToArchiveCommand.java
@@ -11,7 +11,6 @@
 import edu.harvard.iq.dataverse.DatasetVersion;
 import edu.harvard.iq.dataverse.authorization.Permission;
 import edu.harvard.iq.dataverse.authorization.users.ApiToken;
-import edu.harvard.iq.dataverse.engine.command.Command;
 import edu.harvard.iq.dataverse.engine.command.DataverseRequest;
 import edu.harvard.iq.dataverse.engine.command.RequiredPermissions;
 import edu.harvard.iq.dataverse.settings.JvmSettings;
@@ -26,14 +25,14 @@
 import java.io.IOException;
 import java.io.PipedInputStream;
 import java.io.PipedOutputStream;
-import java.nio.charset.Charset;
+import java.nio.charset.StandardCharsets;
 import java.security.DigestInputStream;
 import java.security.MessageDigest;
 import java.util.Map;
 import java.util.logging.Logger;
 
 @RequiredPermissions(Permission.PublishDataset)
-public class GoogleCloudSubmitToArchiveCommand extends AbstractSubmitToArchiveCommand implements Command<DatasetVersion> {
+public class GoogleCloudSubmitToArchiveCommand extends AbstractSubmitToArchiveCommand {
 
     private static final Logger logger = Logger.getLogger(GoogleCloudSubmitToArchiveCommand.class.getName());
     private static final String GOOGLECLOUD_BUCKET = ":GoogleCloudBucket";
@@ -82,7 +81,7 @@ public WorkflowStepResult performArchiveSubmission(DatasetVersion dv, ApiToken t
                             public void run() {
                                 try (PipedOutputStream dataciteOut = new PipedOutputStream(dataciteIn)) {
 
-                                    dataciteOut.write(dataciteXml.getBytes(Charset.forName("utf-8")));
+                                    dataciteOut.write(dataciteXml.getBytes(StandardCharsets.UTF_8));
                                     dataciteOut.close();
                                     success = true;
                                 } catch (Exception e) {
diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/ListDataverseInputLevelsCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/ListDataverseInputLevelsCommand.java
new file mode 100644
index 00000000000..1727ac9698f
--- /dev/null
+++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/ListDataverseInputLevelsCommand.java
@@ -0,0 +1,40 @@
+package edu.harvard.iq.dataverse.engine.command.impl;
+
+import edu.harvard.iq.dataverse.Dataverse;
+import edu.harvard.iq.dataverse.DataverseFacet;
+import edu.harvard.iq.dataverse.DataverseFieldTypeInputLevel;
+import edu.harvard.iq.dataverse.authorization.Permission;
+import edu.harvard.iq.dataverse.engine.command.AbstractCommand;
+import edu.harvard.iq.dataverse.engine.command.CommandContext;
+import edu.harvard.iq.dataverse.engine.command.DataverseRequest;
+import edu.harvard.iq.dataverse.engine.command.exception.CommandException;
+
+import java.util.Collections;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+
+/**
+ * List the field type input levels {@link DataverseFieldTypeInputLevel} of a {@link Dataverse}.
+ */
+public class ListDataverseInputLevelsCommand extends AbstractCommand<List<DataverseFieldTypeInputLevel>> {
+
+    private final Dataverse dataverse;
+
+    public ListDataverseInputLevelsCommand(DataverseRequest request, Dataverse dataverse) {
+        super(request, dataverse);
+        this.dataverse = dataverse;
+    }
+
+    @Override
+    public List<DataverseFieldTypeInputLevel> execute(CommandContext ctxt) throws CommandException {
+        return dataverse.getDataverseFieldTypeInputLevels();
+    }
+
+    @Override
+    public Map<String, Set<Permission>> getRequiredPermissions() {
+        return Collections.singletonMap("",
+                dataverse.isReleased() ? Collections.emptySet()
+                        : Collections.singleton(Permission.ViewUnpublishedDataverse));
+    }
+}
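
ListDataverseInputLevelsCommand above simply returns the collection's configured input levels and, per getRequiredPermissions, needs ViewUnpublishedDataverse only when the collection is unreleased. A hypothetical caller-side sketch, following the engine-submission pattern used elsewhere in this diff; engine, request, dataverse and logger are assumed to be in scope (for example inside an API bean) and are not defined by this patch:

    // Hypothetical usage sketch, not part of the patch.
    List<DataverseFieldTypeInputLevel> levels =
            engine.submit(new ListDataverseInputLevelsCommand(request, dataverse));
    logger.fine("input levels configured: " + levels.size());
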
diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/ListFacetsCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/ListFacetsCommand.java
index cbab378ccac..36bd1ef4981 100644
--- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/ListFacetsCommand.java
+++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/ListFacetsCommand.java
@@ -7,6 +7,7 @@
 import edu.harvard.iq.dataverse.engine.command.CommandContext;
 import edu.harvard.iq.dataverse.engine.command.DataverseRequest;
 import edu.harvard.iq.dataverse.engine.command.exception.CommandException;
+
 import java.util.Collections;
 import java.util.List;
 import java.util.Map;
@@ -14,27 +15,34 @@
 
 /**
  * List the search facets {@link DataverseFacet} of a {@link Dataverse}.
+ *
  * @author michaelsuo
  */
 // no annotations here, since permissions are dynamically decided
 public class ListFacetsCommand extends AbstractCommand<List<DataverseFacet>> {
 
-    private final Dataverse dv;
+    private final Dataverse dataverse;
+    private boolean rootFacets;
+
+    public ListFacetsCommand(DataverseRequest request, Dataverse dataverse) {
+        this(request, dataverse, true);
+    }
 
-    public ListFacetsCommand(DataverseRequest aRequest, Dataverse aDataverse) {
-        super(aRequest, aDataverse);
-        dv = aDataverse;
+    public ListFacetsCommand(DataverseRequest request, Dataverse dataverse, boolean rootFacets) {
+        super(request, dataverse);
+        this.dataverse = dataverse;
+        this.rootFacets = rootFacets;
     }
 
     @Override
     public List<DataverseFacet> execute(CommandContext ctxt) throws CommandException {
-        return dv.getDataverseFacets();
+        return dataverse.getDataverseFacets(!rootFacets);
     }
 
     @Override
     public Map<String, Set<Permission>> getRequiredPermissions() {
         return Collections.singletonMap("",
-                dv.isReleased() ? Collections.<Permission>emptySet()
-                : Collections.singleton(Permission.ViewUnpublishedDataverse));
+                dataverse.isReleased() ? Collections.<Permission>emptySet()
+                        : Collections.singleton(Permission.ViewUnpublishedDataverse));
     }
 }
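
The extra boolean distinguishes the two constructors added above; the interpretation of the flag is inferred from the parameter name rather than stated in the patch. A hedged caller sketch (engine, request and dataverse assumed to be in scope):

    // Same behavior as before this patch: rootFacets defaults to true.
    List<DataverseFacet> allFacets = engine.submit(new ListFacetsCommand(request, dataverse));
    // rootFacets = false makes the command call dataverse.getDataverseFacets(true), which
    // presumably returns only the facets defined on this collection rather than inherited ones.
    List<DataverseFacet> ownFacets = engine.submit(new ListFacetsCommand(request, dataverse, false));
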
diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/MoveDatasetCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/MoveDatasetCommand.java
index 94bcfa2f5b7..bee5dc648b9 100644
--- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/MoveDatasetCommand.java
+++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/MoveDatasetCommand.java
@@ -7,8 +7,10 @@
 
 import edu.harvard.iq.dataverse.Dataset;
 import edu.harvard.iq.dataverse.DatasetLinkingDataverse;
+import edu.harvard.iq.dataverse.DatasetLock;
 import edu.harvard.iq.dataverse.Dataverse;
 import edu.harvard.iq.dataverse.Guestbook;
+import edu.harvard.iq.dataverse.authorization.DataverseRole;
 import edu.harvard.iq.dataverse.authorization.Permission;
 import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser;
 import edu.harvard.iq.dataverse.engine.command.AbstractVoidCommand;
@@ -135,7 +137,14 @@ public void executeImpl(CommandContext ctxt) throws CommandException {
             }
             throw new UnforcedCommandException(errorString.toString(), this);
         }
-
+        
+        // 6575: if the dataset is submitted for review and the destination's default
+        // contributor role includes PublishDataset, remove the InReview lock
+        
+        if (moved.isLockedFor(DatasetLock.Reason.InReview)
+                && destination.getDefaultContributorRole().permissions().contains(Permission.PublishDataset)) {
+            ctxt.datasets().removeDatasetLocks(moved, DatasetLock.Reason.InReview);
+        }
 
         // OK, move
         moved.setOwner(destination);
diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/PublishDatasetCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/PublishDatasetCommand.java
index 6b95f3b6de1..1ac41105237 100644
--- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/PublishDatasetCommand.java
+++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/PublishDatasetCommand.java
@@ -4,20 +4,13 @@
 import edu.harvard.iq.dataverse.DatasetLock;
 import edu.harvard.iq.dataverse.authorization.Permission;
 import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser;
-import edu.harvard.iq.dataverse.engine.command.Command;
 import edu.harvard.iq.dataverse.engine.command.CommandContext;
 import edu.harvard.iq.dataverse.engine.command.DataverseRequest;
 import edu.harvard.iq.dataverse.engine.command.RequiredPermissions;
 import edu.harvard.iq.dataverse.engine.command.exception.CommandException;
 import edu.harvard.iq.dataverse.engine.command.exception.IllegalCommandException;
-import edu.harvard.iq.dataverse.pidproviders.PidProvider;
-import edu.harvard.iq.dataverse.privateurl.PrivateUrl;
-import edu.harvard.iq.dataverse.settings.SettingsServiceBean;
-import edu.harvard.iq.dataverse.util.BundleUtil;
 import edu.harvard.iq.dataverse.workflow.Workflow;
 import edu.harvard.iq.dataverse.workflow.WorkflowContext.TriggerType;
-import java.util.Date;
-import java.util.List;
 import java.util.Optional;
 import java.util.logging.Logger;
 import static java.util.stream.Collectors.joining;
diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/ReservePidCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/ReservePidCommand.java
index b7e3ddd8ce6..77b06e4e152 100644
--- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/ReservePidCommand.java
+++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/ReservePidCommand.java
@@ -3,27 +3,21 @@
 import edu.harvard.iq.dataverse.Dataset;
 import edu.harvard.iq.dataverse.authorization.Permission;
 import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser;
-import edu.harvard.iq.dataverse.engine.command.AbstractVoidCommand;
 import edu.harvard.iq.dataverse.engine.command.CommandContext;
 import edu.harvard.iq.dataverse.engine.command.DataverseRequest;
 import edu.harvard.iq.dataverse.engine.command.RequiredPermissions;
 import edu.harvard.iq.dataverse.engine.command.exception.CommandException;
-import edu.harvard.iq.dataverse.engine.command.exception.IllegalCommandException;
 import edu.harvard.iq.dataverse.engine.command.exception.PermissionException;
-import edu.harvard.iq.dataverse.pidproviders.PidProvider;
-import edu.harvard.iq.dataverse.pidproviders.PidUtil;
-import edu.harvard.iq.dataverse.settings.SettingsServiceBean;
 import edu.harvard.iq.dataverse.util.BundleUtil;
-import java.util.Arrays;
 import java.util.Collections;
-import java.util.Date;
 import java.util.logging.Logger;
 
 /**
  * No required permissions because we check for superuser status.
+ * @param <T>
  */
 @RequiredPermissions({})
-public class ReservePidCommand extends AbstractVoidCommand {
+public class ReservePidCommand extends AbstractDatasetCommand<Dataset> {
 
     private static final Logger logger = Logger.getLogger(ReservePidCommand.class.getCanonicalName());
 
@@ -35,27 +29,15 @@ public ReservePidCommand(DataverseRequest request, Dataset dataset) {
     }
 
     @Override
-    protected void executeImpl(CommandContext ctxt) throws CommandException {
+    public Dataset execute(CommandContext ctxt) throws CommandException {
 
         if (!(getUser() instanceof AuthenticatedUser) || !getUser().isSuperuser()) {
             throw new PermissionException(BundleUtil.getStringFromBundle("admin.api.auth.mustBeSuperUser"),
                     this, Collections.singleton(Permission.EditDataset), dataset);
         }
-
-        PidProvider pidProvider = ctxt.dvObjects().getEffectivePidGenerator(dataset);
-        
-        try {
-            String returnString = pidProvider.createIdentifier(dataset);
-            logger.fine(returnString);
-            // No errors caught, so mark PID as reserved.
-            dataset.setGlobalIdCreateTime(new Date());
-            // We don't setIdentifierRegistered(true) yet.
-            ctxt.datasets().merge(dataset);
-        } catch (Throwable ex) {
-            String message = BundleUtil.getStringFromBundle("pids.commands.reservePid.failure", Arrays.asList(dataset.getId().toString(), ex.getLocalizedMessage()));
-            logger.info(message);
-            throw new IllegalCommandException(message, this);
-        }
+        registerExternalIdentifier(getDataset(), ctxt, true);
+        registerFilePidsIfNeeded(getDataset(), ctxt, true);
+        return dataset;
     }
 
 }
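
Since ReservePidCommand now extends AbstractDatasetCommand<Dataset> and delegates to the shared registerExternalIdentifier / registerFilePidsIfNeeded helpers, callers get the dataset back instead of void. A hedged sketch (engine, request, dataset and logger assumed to be in scope):

    // Hypothetical caller, not part of the patch.
    Dataset reserved = engine.submit(new ReservePidCommand(request, dataset));
    logger.fine("Reserved PID " + reserved.getGlobalId().asString());
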
diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/S3SubmitToArchiveCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/S3SubmitToArchiveCommand.java
index f02edd54b86..a660b1a4d59 100644
--- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/S3SubmitToArchiveCommand.java
+++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/S3SubmitToArchiveCommand.java
@@ -5,7 +5,6 @@
 import edu.harvard.iq.dataverse.DatasetLock.Reason;
 import edu.harvard.iq.dataverse.authorization.Permission;
 import edu.harvard.iq.dataverse.authorization.users.ApiToken;
-import edu.harvard.iq.dataverse.engine.command.Command;
 import edu.harvard.iq.dataverse.engine.command.DataverseRequest;
 import edu.harvard.iq.dataverse.engine.command.RequiredPermissions;
 import edu.harvard.iq.dataverse.util.bagit.BagGenerator;
@@ -17,6 +16,7 @@
 import java.io.ByteArrayInputStream;
 import java.io.File;
 import java.io.FileInputStream;
+import java.nio.charset.StandardCharsets;
 import java.util.Map;
 import java.util.logging.Logger;
 
@@ -41,7 +41,7 @@
 import com.amazonaws.services.s3.transfer.TransferManagerBuilder;
 
 @RequiredPermissions(Permission.PublishDataset)
-public class S3SubmitToArchiveCommand extends AbstractSubmitToArchiveCommand implements Command<DatasetVersion> {
+public class S3SubmitToArchiveCommand extends AbstractSubmitToArchiveCommand {
 
     private static final Logger logger = Logger.getLogger(S3SubmitToArchiveCommand.class.getName());
     private static final String S3_CONFIG = ":S3ArchiverConfig";
@@ -86,7 +86,7 @@ public WorkflowStepResult performArchiveSubmission(DatasetVersion dv, ApiToken t
 
                     spaceName = getSpaceName(dataset);
                     String dataciteXml = getDataCiteXml(dv);
-                    try (ByteArrayInputStream dataciteIn = new ByteArrayInputStream(dataciteXml.getBytes("UTF-8"))) {
+                    try (ByteArrayInputStream dataciteIn = new ByteArrayInputStream(dataciteXml.getBytes(StandardCharsets.UTF_8))) {
                         // Add datacite.xml file
                         ObjectMetadata om = new ObjectMetadata();
                         om.setContentLength(dataciteIn.available());
diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateDatasetVersionCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateDatasetVersionCommand.java
index 994f4c7dfb6..768bb88fd43 100644
--- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateDatasetVersionCommand.java
+++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateDatasetVersionCommand.java
@@ -154,7 +154,7 @@ public Dataset execute(CommandContext ctxt) throws CommandException {
             		throw e;
             	}
             }
-
+            // Set creator and create date for files if needed
             for (DataFile dataFile : theDataset.getFiles()) {
                 if (dataFile.getCreateDate() == null) {
                     dataFile.setCreateDate(getTimestamp());
@@ -259,6 +259,7 @@ public Dataset execute(CommandContext ctxt) throws CommandException {
             for(FileMetadata fmd: theDataset.getOrCreateEditVersion().getFileMetadatas()) {
                 logger.fine("FMD: " + fmd.getId() + " for file: " + fmd.getDataFile().getId() + "is in final draft version");    
             }
+            registerFilePidsIfNeeded(theDataset, ctxt, true);
             
             if (recalculateUNF) {
                 ctxt.ingest().recalculateDatasetVersionUNF(theDataset.getOrCreateEditVersion());
diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateDataverseInputLevelsCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateDataverseInputLevelsCommand.java
index cf7b4a6f69c..b9b08992919 100644
--- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateDataverseInputLevelsCommand.java
+++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateDataverseInputLevelsCommand.java
@@ -2,7 +2,6 @@
 
 import edu.harvard.iq.dataverse.Dataverse;
 import edu.harvard.iq.dataverse.DataverseFieldTypeInputLevel;
-import edu.harvard.iq.dataverse.MetadataBlock;
 import edu.harvard.iq.dataverse.authorization.Permission;
 import edu.harvard.iq.dataverse.engine.command.AbstractCommand;
 import edu.harvard.iq.dataverse.engine.command.CommandContext;
@@ -29,23 +28,8 @@ public Dataverse execute(CommandContext ctxt) throws CommandException {
         if (inputLevelList == null || inputLevelList.isEmpty()) {
             throw new CommandException("Error while updating dataverse input levels: Input level list cannot be null or empty", this);
         }
-        addInputLevelMetadataBlocks();
+        dataverse.addInputLevelsMetadataBlocksIfNotPresent(inputLevelList);
         dataverse.setMetadataBlockRoot(true);
         return ctxt.engine().submit(new UpdateDataverseCommand(dataverse, null, null, getRequest(), inputLevelList));
     }
-
-    private void addInputLevelMetadataBlocks() {
-        List<MetadataBlock> dataverseMetadataBlocks = dataverse.getMetadataBlocks();
-        for (DataverseFieldTypeInputLevel inputLevel : inputLevelList) {
-            MetadataBlock inputLevelMetadataBlock = inputLevel.getDatasetFieldType().getMetadataBlock();
-            if (!dataverseHasMetadataBlock(dataverseMetadataBlocks, inputLevelMetadataBlock)) {
-                dataverseMetadataBlocks.add(inputLevelMetadataBlock);
-            }
-        }
-        dataverse.setMetadataBlocks(dataverseMetadataBlocks);
-    }
-
-    private boolean dataverseHasMetadataBlock(List<MetadataBlock> dataverseMetadataBlocks, MetadataBlock metadataBlock) {
-        return dataverseMetadataBlocks.stream().anyMatch(block -> block.getId().equals(metadataBlock.getId()));
-    }
 }
diff --git a/src/main/java/edu/harvard/iq/dataverse/export/JSONExporter.java b/src/main/java/edu/harvard/iq/dataverse/export/JSONExporter.java
index a54e61c7c1e..cf3afd1a39a 100644
--- a/src/main/java/edu/harvard/iq/dataverse/export/JSONExporter.java
+++ b/src/main/java/edu/harvard/iq/dataverse/export/JSONExporter.java
@@ -7,10 +7,10 @@
 import io.gdcc.spi.export.Exporter;
 import edu.harvard.iq.dataverse.util.BundleUtil;
 import java.io.OutputStream;
+import java.nio.charset.StandardCharsets;
 import java.util.Locale;
 import java.util.Optional;
 
-import jakarta.json.JsonObject;
 import jakarta.ws.rs.core.MediaType;
 
 
@@ -35,7 +35,7 @@ public String getDisplayName(Locale locale) {
     @Override
     public void exportDataset(ExportDataProvider dataProvider, OutputStream outputStream) throws ExportException {
         try{
-            outputStream.write(dataProvider.getDatasetJson().toString().getBytes("UTF8"));
+            outputStream.write(dataProvider.getDatasetJson().toString().getBytes(StandardCharsets.UTF_8));
             outputStream.flush();
         } catch (Exception e){
             throw new ExportException("Unknown exception caught during JSON export.");
diff --git a/src/main/java/edu/harvard/iq/dataverse/export/OAI_OREExporter.java b/src/main/java/edu/harvard/iq/dataverse/export/OAI_OREExporter.java
index feec4403570..86af45195d7 100644
--- a/src/main/java/edu/harvard/iq/dataverse/export/OAI_OREExporter.java
+++ b/src/main/java/edu/harvard/iq/dataverse/export/OAI_OREExporter.java
@@ -7,11 +7,11 @@
 import edu.harvard.iq.dataverse.util.BundleUtil;
 
 import java.io.OutputStream;
+import java.nio.charset.StandardCharsets;
 import java.util.Locale;
 import java.util.Optional;
 import java.util.logging.Logger;
 
-import jakarta.json.JsonObject;
 import jakarta.ws.rs.core.MediaType;
 
 @AutoService(Exporter.class)
@@ -25,7 +25,7 @@ public class OAI_OREExporter implements Exporter {
     public void exportDataset(ExportDataProvider dataProvider, OutputStream outputStream)
             throws ExportException {
         try {
-            outputStream.write(dataProvider.getDatasetORE().toString().getBytes("UTF8"));
+            outputStream.write(dataProvider.getDatasetORE().toString().getBytes(StandardCharsets.UTF_8));
             outputStream.flush();
         } catch (Exception e) {
             logger.severe(e.getMessage());
diff --git a/src/main/java/edu/harvard/iq/dataverse/export/SchemaDotOrgExporter.java b/src/main/java/edu/harvard/iq/dataverse/export/SchemaDotOrgExporter.java
index 5428715b905..0c4b39fd641 100644
--- a/src/main/java/edu/harvard/iq/dataverse/export/SchemaDotOrgExporter.java
+++ b/src/main/java/edu/harvard/iq/dataverse/export/SchemaDotOrgExporter.java
@@ -7,6 +7,7 @@
 import edu.harvard.iq.dataverse.util.BundleUtil;
 import java.io.IOException;
 import java.io.OutputStream;
+import java.nio.charset.StandardCharsets;
 import java.util.Locale;
 import java.util.logging.Logger;
 import jakarta.ws.rs.core.MediaType;
@@ -75,7 +76,7 @@ public class SchemaDotOrgExporter implements Exporter {
     @Override
     public void exportDataset(ExportDataProvider dataProvider, OutputStream outputStream) throws ExportException {
         try {
-            outputStream.write(dataProvider.getDatasetSchemaDotOrg().toString().getBytes("UTF8"));
+            outputStream.write(dataProvider.getDatasetSchemaDotOrg().toString().getBytes(StandardCharsets.UTF_8));
         } catch (IOException ex) {
             logger.info("IOException calling outputStream.write: " + ex);
         }
diff --git a/src/main/java/edu/harvard/iq/dataverse/ingest/IngestableDataChecker.java b/src/main/java/edu/harvard/iq/dataverse/ingest/IngestableDataChecker.java
index 9b62b62fe61..fa83552a9ec 100644
--- a/src/main/java/edu/harvard/iq/dataverse/ingest/IngestableDataChecker.java
+++ b/src/main/java/edu/harvard/iq/dataverse/ingest/IngestableDataChecker.java
@@ -24,6 +24,7 @@
 import java.io.*;
 import java.nio.*;
 import java.nio.channels.*;
+import java.nio.charset.StandardCharsets;
 import java.util.*;
 import java.lang.reflect.*;
 import java.util.regex.*;
@@ -252,7 +253,7 @@ public String testDTAformat(MappedByteBuffer buff) {
             try {
                 headerBuffer = new byte[STATA_13_HEADER.length()];
                 buff.get(headerBuffer, 0, STATA_13_HEADER.length());
-                headerString = new String(headerBuffer, "US-ASCII");
+                headerString = new String(headerBuffer, StandardCharsets.US_ASCII);
             } catch (Exception ex) {
                 // probably a buffer underflow exception; 
                 // we don't have to do anything... null will 
@@ -273,7 +274,7 @@ public String testDTAformat(MappedByteBuffer buff) {
             try {
                 headerBuffer = new byte[STATA_14_HEADER.length()];
                 buff.get(headerBuffer, 0, STATA_14_HEADER.length());
-                headerString = new String(headerBuffer, "US-ASCII");
+                headerString = new String(headerBuffer, StandardCharsets.US_ASCII);
             } catch (Exception ex) {
                 // probably a buffer underflow exception;
                 // we don't have to do anything... null will
@@ -292,7 +293,7 @@ public String testDTAformat(MappedByteBuffer buff) {
             try {
                 headerBuffer = new byte[STATA_15_HEADER.length()];
                 buff.get(headerBuffer, 0, STATA_15_HEADER.length());
-                headerString = new String(headerBuffer, "US-ASCII");
+                headerString = new String(headerBuffer, StandardCharsets.US_ASCII);
             } catch (Exception ex) {
                 // probably a buffer underflow exception;
                 // we don't have to do anything... null will
diff --git a/src/main/java/edu/harvard/iq/dataverse/ingest/tabulardata/impl/plugins/dta/DTAFileReader.java b/src/main/java/edu/harvard/iq/dataverse/ingest/tabulardata/impl/plugins/dta/DTAFileReader.java
index 73818f8fb62..f0262af9e33 100644
--- a/src/main/java/edu/harvard/iq/dataverse/ingest/tabulardata/impl/plugins/dta/DTAFileReader.java
+++ b/src/main/java/edu/harvard/iq/dataverse/ingest/tabulardata/impl/plugins/dta/DTAFileReader.java
@@ -29,6 +29,7 @@
 import java.io.PrintWriter;
 import java.nio.ByteBuffer;
 import java.nio.ByteOrder;
+import java.nio.charset.StandardCharsets;
 import java.text.DecimalFormat;
 import java.text.NumberFormat;
 import java.text.ParseException;
@@ -685,7 +686,7 @@ private void decodeHeader(BufferedInputStream stream) throws IOException {
         }
 
         String data_label = new String(Arrays.copyOfRange(header, dl_offset,
-                (dl_offset + dataLabelLength)), "ISO-8859-1");
+                (dl_offset + dataLabelLength)), StandardCharsets.ISO_8859_1);
 
         if (dbgLog.isLoggable(Level.FINE)) {
             dbgLog.fine("data_label_length=" + data_label.length());
@@ -710,7 +711,7 @@ private void decodeHeader(BufferedInputStream stream) throws IOException {
         if (releaseNumber > 104) {
             int ts_offset = dl_offset + dataLabelLength;
             String time_stamp = new String(Arrays.copyOfRange(header, ts_offset,
-                    ts_offset + TIME_STAMP_LENGTH), "ISO-8859-1");
+                    ts_offset + TIME_STAMP_LENGTH), StandardCharsets.ISO_8859_1);
             if (dbgLog.isLoggable(Level.FINE)) {
                 dbgLog.fine("time_stamp_length=" + time_stamp.length());
             }
@@ -912,7 +913,7 @@ private void decodeDescriptorVarNameList(BufferedInputStream stream, int nvar) t
         for (DataVariable dataVariable: dataTable.getDataVariables()) {
             offset_end += length_var_name;
             String vari = new String(Arrays.copyOfRange(variableNameBytes, offset_start,
-                    offset_end), "ISO-8859-1");
+                    offset_end), StandardCharsets.ISO_8859_1);
             String varName = getNullStrippedString(vari);
             dataVariable.setName(varName);
             dbgLog.fine("next name=[" + varName + "]");
@@ -978,7 +979,7 @@ private void decodeDescriptorVariableFormat(BufferedInputStream stream, int nvar
         for (int i = 0; i < nvar; i++) {
             offset_end += length_var_format;
             String vari = new String(Arrays.copyOfRange(variableFormatList, offset_start,
-                    offset_end), "ISO-8859-1");
+                    offset_end), StandardCharsets.ISO_8859_1);
             String variableFormat = getNullStrippedString(vari);
             if (dbgLog.isLoggable(Level.FINE)) dbgLog.fine(i + "-th format=[" + variableFormat + "]");
                         
@@ -1045,7 +1046,7 @@ private void decodeDescriptorValueLabel(BufferedInputStream stream, int nvar) th
         for (int i = 0; i < nvar; i++) {
             offset_end += length_label_name;
             String vari = new String(Arrays.copyOfRange(labelNameList, offset_start,
-                    offset_end), "ISO-8859-1");
+                    offset_end), StandardCharsets.ISO_8859_1);
             labelNames[i] = getNullStrippedString(vari);
             dbgLog.fine(i + "-th label=[" + labelNames[i] + "]");
             offset_start = offset_end;
@@ -1090,7 +1091,7 @@ private void decodeVariableLabels(BufferedInputStream stream) throws IOException
         for (int i = 0; i < nvar; i++) {
             offset_end += length_var_label;
             String vari = new String(Arrays.copyOfRange(variableLabelBytes, offset_start,
-                    offset_end), "ISO-8859-1");
+                    offset_end), StandardCharsets.ISO_8859_1);
             
             String variableLabelParsed = getNullStrippedString(vari);
             if (dbgLog.isLoggable(Level.FINE)) {
@@ -1272,7 +1273,7 @@ void parseValueLabelsRelease105(BufferedInputStream stream) throws IOException {
                     valueLabelHeader,
                     value_label_table_length,
                     (value_label_table_length + length_label_name)),
-                    "ISO-8859-1");
+                    StandardCharsets.ISO_8859_1);
 
             if (dbgLog.isLoggable(Level.FINE)) {
                 dbgLog.fine("rawLabelName(length)=" + rawLabelName.length());
@@ -1335,7 +1336,7 @@ void parseValueLabelsRelease105(BufferedInputStream stream) throws IOException {
             for (int l = 0; l < no_value_label_pairs; l++) {
 
                 String string_l = new String(Arrays.copyOfRange(valueLabelTable_i, offset_start,
-                        offset_end), "ISO-8859-1");
+                        offset_end), StandardCharsets.ISO_8859_1);
 
                 int null_position = string_l.indexOf(0);
                 if (null_position != -1) {
@@ -1485,7 +1486,7 @@ private void parseValueLabelsReleasel108(BufferedInputStream stream) throws IOEx
                     valueLabelHeader,
                     value_label_table_length,
                     (value_label_table_length + length_label_name)),
-                    "ISO-8859-1");
+                    StandardCharsets.ISO_8859_1);
             String labelName = getNullStrippedString(rawLabelName);
 
             if (dbgLog.isLoggable(Level.FINE)) {
@@ -1581,7 +1582,7 @@ private void parseValueLabelsReleasel108(BufferedInputStream stream) throws IOEx
             String label_segment = new String(
                     Arrays.copyOfRange(valueLabelTable_i,
                             offset_value,
-                            (length_label_segment + offset_value)), "ISO-8859-1");
+                            (length_label_segment + offset_value)), StandardCharsets.ISO_8859_1);
 
             // L.A. -- 2011.2.25:
             // This assumes that the labels are already stored in the right
@@ -1701,7 +1702,7 @@ private void decodeData(BufferedInputStream stream, boolean saveWithVariableHead
         ingesteddata.setTabDelimitedFile(tabDelimitedDataFile);
 
         fileOutTab = new FileOutputStream(tabDelimitedDataFile);
-        pwout = new PrintWriter(new OutputStreamWriter(fileOutTab, "utf8"), true);
+        pwout = new PrintWriter(new OutputStreamWriter(fileOutTab, StandardCharsets.UTF_8), true);
 
         /* Should we lose this dateFormat thing in 4.0? 
          * the UNF should be calculatable on the app side solely from the data
@@ -1932,7 +1933,7 @@ private void decodeData(BufferedInputStream stream, boolean saveWithVariableHead
                         // String case
                         int strVarLength = StringLengthTable.get(columnCounter);
                         String raw_datum = new String(Arrays.copyOfRange(dataRowBytes, byte_offset,
-                                (byte_offset + strVarLength)), "ISO-8859-1");
+                                (byte_offset + strVarLength)), StandardCharsets.ISO_8859_1);
                         // TODO: 
                         // is it the right thing to do, to default to "ISO-8859-1"?
                         // (it may be; since there's no mechanism for specifying
diff --git a/src/main/java/edu/harvard/iq/dataverse/ingest/tabulardata/impl/plugins/dta/DataReader.java b/src/main/java/edu/harvard/iq/dataverse/ingest/tabulardata/impl/plugins/dta/DataReader.java
index 0822f6eed72..913c0ebeab2 100644
--- a/src/main/java/edu/harvard/iq/dataverse/ingest/tabulardata/impl/plugins/dta/DataReader.java
+++ b/src/main/java/edu/harvard/iq/dataverse/ingest/tabulardata/impl/plugins/dta/DataReader.java
@@ -4,6 +4,7 @@
 import java.io.IOException;
 import java.nio.ByteBuffer;
 import java.nio.ByteOrder;
+import java.nio.charset.StandardCharsets;
 import java.util.Arrays;
 import java.util.logging.Logger;
 
@@ -273,7 +274,7 @@ public float readFloat() throws IOException {
      */
     public String readString(int n) throws IOException {
 
-        String ret = new String(readBytes(n), "US-ASCII");
+        String ret = new String(readBytes(n), StandardCharsets.US_ASCII);
 
         // Remove the terminating and/or padding zero bytes:
         if (ret != null && ret.indexOf(0) > -1) {
@@ -287,7 +288,7 @@ public String readString(int n) throws IOException {
      */
     public String readUtfString(int n) throws IOException {
 
-        String ret = new String(readBytes(n), "UTF8");
+        String ret = new String(readBytes(n), StandardCharsets.UTF_8);
 
         // Remove the terminating and/or padding zero bytes:
         if (ret.indexOf(0) > -1) {
@@ -314,11 +315,11 @@ public byte[] readPrimitiveSection(String tag, int length) throws IOException {
     }
 
     public String readPrimitiveStringSection(String tag) throws IOException {
-        return new String(readPrimitiveSection(tag), "US-ASCII");
+        return new String(readPrimitiveSection(tag), StandardCharsets.US_ASCII);
     }
 
     public String readPrimitiveStringSection(String tag, int length) throws IOException {
-        return new String(readPrimitiveSection(tag, length), "US-ASCII");
+        return new String(readPrimitiveSection(tag, length), StandardCharsets.US_ASCII);
     }
 
     public String readLabelSection(String tag, int limit) throws IOException {
@@ -332,7 +333,7 @@ public String readLabelSection(String tag, int limit) throws IOException {
         logger.fine("length of label: " + lengthOfLabel);
         String label = null;
         if (lengthOfLabel > 0) {
-            label = new String(readBytes(lengthOfLabel), "US-ASCII");
+            label = new String(readBytes(lengthOfLabel), StandardCharsets.US_ASCII);
         }
         logger.fine("ret: " + label);
         readClosingTag(tag);
@@ -358,7 +359,7 @@ public String readDefinedStringSection(String tag, int limit) throws IOException
         }
         String ret = null;
         if (number > 0) {
-            ret = new String(readBytes(number), "US-ASCII");
+            ret = new String(readBytes(number), StandardCharsets.US_ASCII);
         }
         logger.fine("ret: " + ret);
         readClosingTag(tag);
@@ -400,7 +401,7 @@ public boolean checkTag(String tag) throws IOException {
 
         int n = tag.length();
         if ((this.buffer_size - buffer_byte_offset) >= n) {
-            return (tag).equals(new String(Arrays.copyOfRange(buffer, buffer_byte_offset, buffer_byte_offset+n),"US-ASCII"));
+            return (tag).equals(new String(Arrays.copyOfRange(buffer, buffer_byte_offset, buffer_byte_offset+n),StandardCharsets.US_ASCII));
         }
         else{
             bufferMoreBytes();
@@ -414,7 +415,7 @@ public void readOpeningTag(String tag) throws IOException {
             throw new IOException("opening tag must be a non-empty string.");
         }
 
-        String openTagString = new String(readBytes(tag.length() + 2), "US-ASCII");
+        String openTagString = new String(readBytes(tag.length() + 2), StandardCharsets.US_ASCII);
         if (openTagString == null || !openTagString.equals("<"+tag+">")) {
             throw new IOException("Could not read opening tag <"+tag+">");
         }
@@ -425,7 +426,7 @@ public void readClosingTag(String tag) throws IOException {
             throw new IOException("closing tag must be a non-empty string.");
         }
 
-        String closeTagString = new String(readBytes(tag.length() + 3), "US-ASCII");
+        String closeTagString = new String(readBytes(tag.length() + 3), StandardCharsets.US_ASCII);
         logger.fine("closeTagString: " + closeTagString);
 
         if (closeTagString == null || !closeTagString.equals("</" + tag + ">")) {
diff --git a/src/main/java/edu/harvard/iq/dataverse/ingest/tabulardata/impl/plugins/dta/NewDTAFileReader.java b/src/main/java/edu/harvard/iq/dataverse/ingest/tabulardata/impl/plugins/dta/NewDTAFileReader.java
index 53607d541de..b0f2c50c997 100644
--- a/src/main/java/edu/harvard/iq/dataverse/ingest/tabulardata/impl/plugins/dta/NewDTAFileReader.java
+++ b/src/main/java/edu/harvard/iq/dataverse/ingest/tabulardata/impl/plugins/dta/NewDTAFileReader.java
@@ -7,6 +7,7 @@
 import java.io.IOException;
 import java.io.OutputStreamWriter;
 import java.io.PrintWriter;
+import java.nio.charset.StandardCharsets;
 import java.text.DecimalFormat;
 import java.text.NumberFormat;
 import java.text.ParseException;
@@ -735,7 +736,7 @@ private void readData(DataReader reader, String variableHeaderLine) throws IOExc
         ingesteddata.setTabDelimitedFile(tabDelimitedDataFile);
 
         FileOutputStream fileOutTab = new FileOutputStream(tabDelimitedDataFile);
-        PrintWriter pwout = new PrintWriter(new OutputStreamWriter(fileOutTab, "utf8"), true);
+        PrintWriter pwout = new PrintWriter(new OutputStreamWriter(fileOutTab, StandardCharsets.UTF_8), true);
 
         // add the variable header here, if needed
         if (variableHeaderLine != null) {
@@ -1001,7 +1002,7 @@ private void readSTRLs(DataReader reader) throws IOException {
 
             File finalTabFile = File.createTempFile("finalTabfile.", ".tab");
             FileOutputStream fileOutTab = new FileOutputStream(finalTabFile);
-            PrintWriter pwout = new PrintWriter(new OutputStreamWriter(fileOutTab, "utf8"), true);
+            PrintWriter pwout = new PrintWriter(new OutputStreamWriter(fileOutTab, StandardCharsets.UTF_8), true);
 
             logger.fine("Setting the tab-delimited file to " + finalTabFile.getName());
             ingesteddata.setTabDelimitedFile(finalTabFile);
@@ -1130,9 +1131,9 @@ private String readGSO(DataReader reader, long v, long o) throws IOException {
 
         String gsoString;
         if (binary) {
-            gsoString = new String(contents, "utf8"); 
+            gsoString = new String(contents, StandardCharsets.UTF_8); 
         } else {
-            gsoString = new String(contents, 0, (int) length - 1, "US-ASCII");
+            gsoString = new String(contents, 0, (int) length - 1, StandardCharsets.US_ASCII);
         }
 
         logger.fine("GSO " + v + "," + o + ": " + gsoString);
@@ -1226,7 +1227,7 @@ private void readValueLabels(DataReader reader) throws IOException {
                 }
                 label_length = (int)(label_end - label_offset);
 
-                category_value_labels[i] = new String(Arrays.copyOfRange(labelBytes, (int)label_offset, (int)label_end-1), "UTF8");
+                category_value_labels[i] = new String(Arrays.copyOfRange(labelBytes, (int)label_offset, (int)label_end-1), StandardCharsets.UTF_8);
                 total_label_bytes += label_length;
             }
 
diff --git a/src/main/java/edu/harvard/iq/dataverse/ingest/tabulardata/impl/plugins/por/PORFileReader.java b/src/main/java/edu/harvard/iq/dataverse/ingest/tabulardata/impl/plugins/por/PORFileReader.java
index 2ee966c3e31..13325ca8f60 100644
--- a/src/main/java/edu/harvard/iq/dataverse/ingest/tabulardata/impl/plugins/por/PORFileReader.java
+++ b/src/main/java/edu/harvard/iq/dataverse/ingest/tabulardata/impl/plugins/por/PORFileReader.java
@@ -31,7 +31,7 @@
 import java.io.PrintWriter;
 import java.io.Writer;
 import java.nio.ByteBuffer;
-
+import java.nio.charset.StandardCharsets;
 import java.text.DecimalFormat;
 import java.text.NumberFormat;
 import java.text.SimpleDateFormat;
@@ -195,7 +195,7 @@ public TabularDataIngest read(BufferedInputStream stream, boolean storeWithVaria
         BufferedReader bfReader = null;
         
         try {            
-            bfReader = new BufferedReader(new InputStreamReader(new FileInputStream(tempPORfile.getAbsolutePath()), "US-ASCII"));
+            bfReader = new BufferedReader(new InputStreamReader(new FileInputStream(tempPORfile.getAbsolutePath()), StandardCharsets.US_ASCII));
             if (bfReader == null){
                 dbgLog.fine("bfReader is null");
                 throw new IOException("bufferedReader is null");
@@ -567,7 +567,7 @@ private File decodeHeader(BufferedInputStream stream) throws IOException {
         try {
             tempPORfile = File.createTempFile("tempPORfile.", ".por");
             fileOutPOR = new FileOutputStream(tempPORfile);
-            fileWriter = new BufferedWriter(new OutputStreamWriter(fileOutPOR, "utf8"));
+            fileWriter = new BufferedWriter(new OutputStreamWriter(fileOutPOR, StandardCharsets.UTF_8));
             porScanner = new Scanner(stream);
 
             // Because 64-bit and 32-bit machines decode POR's first 40-byte
@@ -1115,7 +1115,7 @@ private void decodeData(BufferedReader reader, boolean storeWithVariableHeader)
 
         try {
             fileOutTab = new FileOutputStream(tabDelimitedDataFile);
-            pwout = new PrintWriter(new OutputStreamWriter(fileOutTab, "utf8"), true);
+            pwout = new PrintWriter(new OutputStreamWriter(fileOutTab, StandardCharsets.UTF_8), true);
 
             variableFormatTypeList = new String[varQnty];
             for (int i = 0; i < varQnty; i++) {
diff --git a/src/main/java/edu/harvard/iq/dataverse/ingest/tabulardata/impl/plugins/rdata/RDATAFileReader.java b/src/main/java/edu/harvard/iq/dataverse/ingest/tabulardata/impl/plugins/rdata/RDATAFileReader.java
index 50f2f89e354..215c7a5e6d2 100644
--- a/src/main/java/edu/harvard/iq/dataverse/ingest/tabulardata/impl/plugins/rdata/RDATAFileReader.java
+++ b/src/main/java/edu/harvard/iq/dataverse/ingest/tabulardata/impl/plugins/rdata/RDATAFileReader.java
@@ -22,12 +22,11 @@
 
 import java.io.*;
 import java.io.InputStreamReader;
+import java.nio.charset.StandardCharsets;
 import java.text.*;
 import java.util.logging.*;
 import java.util.*;
 
-import jakarta.inject.Inject;
-
 // Rosuda Wrappers and Methods for R-calls to Rserve
 import edu.harvard.iq.dataverse.settings.JvmSettings;
 import org.rosuda.REngine.REXP;
@@ -504,10 +503,10 @@ public TabularDataIngest read(BufferedInputStream stream, boolean saveWithVariab
             // created!
             // - L.A. 
             RTabFileParser csvFileReader = new RTabFileParser('\t');
-            BufferedReader localBufferedReader = new BufferedReader(new InputStreamReader(new FileInputStream(localCsvFile), "UTF-8"));
+            BufferedReader localBufferedReader = new BufferedReader(new InputStreamReader(new FileInputStream(localCsvFile), StandardCharsets.UTF_8));
 
             File tabFileDestination = File.createTempFile("data-", ".tab");
-            PrintWriter tabFileWriter = new PrintWriter(tabFileDestination.getAbsolutePath(), "UTF-8");
+            PrintWriter tabFileWriter = new PrintWriter(tabFileDestination.getAbsolutePath(), StandardCharsets.UTF_8);
         
             int lineCount = csvFileReader.read(localBufferedReader, dataTable, saveWithVariableHeader, tabFileWriter);
 
@@ -685,7 +684,7 @@ private static String readLocalResource(String path) {
 
         // Try opening a buffered reader stream
         try {
-            BufferedReader rd = new BufferedReader(new InputStreamReader(resourceStream, "UTF-8"));
+            BufferedReader rd = new BufferedReader(new InputStreamReader(resourceStream, StandardCharsets.UTF_8));
 
             String line = null;
             while ((line = rd.readLine()) != null) {
diff --git a/src/main/java/edu/harvard/iq/dataverse/ingest/tabulardata/impl/plugins/sav/SAVFileReader.java b/src/main/java/edu/harvard/iq/dataverse/ingest/tabulardata/impl/plugins/sav/SAVFileReader.java
index 5eecbdfb666..308ff352b2a 100644
--- a/src/main/java/edu/harvard/iq/dataverse/ingest/tabulardata/impl/plugins/sav/SAVFileReader.java
+++ b/src/main/java/edu/harvard/iq/dataverse/ingest/tabulardata/impl/plugins/sav/SAVFileReader.java
@@ -29,7 +29,7 @@
 import java.io.UnsupportedEncodingException;
 import java.nio.ByteBuffer;
 import java.nio.ByteOrder;
-
+import java.nio.charset.StandardCharsets;
 import java.text.DecimalFormat;
 import java.text.NumberFormat;
 import java.text.SimpleDateFormat;
@@ -58,10 +58,7 @@
 
 import edu.harvard.iq.dataverse.DataTable;
 import edu.harvard.iq.dataverse.datavariable.DataVariable;
-import edu.harvard.iq.dataverse.datavariable.SummaryStatistic;
 import edu.harvard.iq.dataverse.datavariable.VariableCategory;
-import edu.harvard.iq.dataverse.datavariable.VariableRange;
-
 import edu.harvard.iq.dataverse.ingest.tabulardata.TabularDataFileReader;
 import edu.harvard.iq.dataverse.ingest.tabulardata.spi.TabularDataFileReaderSpi;
 import edu.harvard.iq.dataverse.ingest.tabulardata.TabularDataIngest;
@@ -633,7 +630,7 @@ void decodeRecordType1(BufferedInputStream stream) throws IOException {
             int offset_end = LENGTH_SPSS_PRODUCT_INFO; // 60 bytes
             
             String productInfo = new String(Arrays.copyOfRange(recordType1, offset_start,
-                offset_end),"US-ASCII");
+                offset_end),StandardCharsets.US_ASCII);
                 
             dbgLog.fine("productInfo:\n"+productInfo+"\n");
             dataTable.setOriginalFormatVersion(productInfo);
@@ -872,7 +869,7 @@ void decodeRecordType1(BufferedInputStream stream) throws IOException {
             offset_end += LENGTH_FILE_CREATION_INFO; // 84 bytes
             
             String fileCreationInfo = getNullStrippedString(new String(Arrays.copyOfRange(recordType1, offset_start,
-                offset_end),"US-ASCII"));
+                offset_end),StandardCharsets.US_ASCII));
                 
             dbgLog.fine("fileCreationInfo:\n"+fileCreationInfo+"\n");
             
@@ -1220,7 +1217,7 @@ void decodeRecordType2(BufferedInputStream stream) throws IOException {
                     // borders. So we always read the bytes, but only use them for
                     // the real variable entries.
                         /*String variableLabel = new String(Arrays.copyOfRange(variable_label,
-                                0, rawVariableLabelLength),"US-ASCII");*/
+                                0, rawVariableLabelLength),StandardCharsets.US_ASCII);*/
 
                         variableLabelMap.put(variableName, variableLabel);
                     }
@@ -2075,7 +2072,7 @@ void decodeRecordType7(BufferedInputStream stream) throws IOException {
                         byte[] work = new byte[unitLength*numberOfUnits];
                         int nbtyes13 = stream.read(work);
 
-                        String[] variableShortLongNamePairs = new String(work,"US-ASCII").split("\t");
+                        String[] variableShortLongNamePairs = new String(work,StandardCharsets.US_ASCII).split("\t");
 
                         for (int i=0; i<variableShortLongNamePairs.length; i++){
                             dbgLog.fine("RT7: "+i+"-th pair"+variableShortLongNamePairs[i]);
@@ -2166,7 +2163,7 @@ void decodeRecordType7(BufferedInputStream stream) throws IOException {
                         byte[] rt7st20bytes = new byte[unitLength*numberOfUnits];
                         int nbytes20 = stream.read(rt7st20bytes);
 
-                        String dataCharSet = new String(rt7st20bytes,"US-ASCII");
+                        String dataCharSet = new String(rt7st20bytes,StandardCharsets.US_ASCII);
 
                         if (dataCharSet != null && !(dataCharSet.equals(""))) {
                             dbgLog.fine("RT7-20: data charset: "+ dataCharSet);
@@ -2347,7 +2344,7 @@ PrintWriter createOutputWriter (BufferedInputStream stream) throws IOException {
 
             fileOutTab = new FileOutputStream(tabDelimitedDataFile);
             
-            pwout = new PrintWriter(new OutputStreamWriter(fileOutTab, "utf8"), true);
+            pwout = new PrintWriter(new OutputStreamWriter(fileOutTab, StandardCharsets.UTF_8), true);
 
         } catch (FileNotFoundException ex) {
             ex.printStackTrace();
diff --git a/src/main/java/edu/harvard/iq/dataverse/makedatacount/DatasetMetrics.java b/src/main/java/edu/harvard/iq/dataverse/makedatacount/DatasetMetrics.java
index ac3dff356eb..15ee35c6404 100644
--- a/src/main/java/edu/harvard/iq/dataverse/makedatacount/DatasetMetrics.java
+++ b/src/main/java/edu/harvard/iq/dataverse/makedatacount/DatasetMetrics.java
@@ -117,10 +117,10 @@ public class DatasetMetrics implements Serializable {
      * For an example of sending various metric types (total-dataset-requests,
      * unique-dataset-investigations, etc) for a given month (2018-04) per
      * country (DK, US, etc.) see
-     * https://github.com/CDLUC3/counter-processor/blob/5ce045a09931fb680a32edcc561f88a407cccc8d/good_test.json#L893
+     * https://github.com/gdcc/counter-processor/blob/5ce045a09931fb680a32edcc561f88a407cccc8d/good_test.json#L893
      *
      * counter-processor uses GeoLite2 for IP lookups according to their
-     * https://github.com/CDLUC3/counter-processor#download-the-free-ip-to-geolocation-database
+     * https://github.com/gdcc/counter-processor#download-the-free-ip-to-geolocation-database
      */
     @Column(nullable = true)
     private String countryCode;
diff --git a/src/main/java/edu/harvard/iq/dataverse/makedatacount/MakeDataCountUtil.java b/src/main/java/edu/harvard/iq/dataverse/makedatacount/MakeDataCountUtil.java
index 8f32750f090..30cbed18337 100644
--- a/src/main/java/edu/harvard/iq/dataverse/makedatacount/MakeDataCountUtil.java
+++ b/src/main/java/edu/harvard/iq/dataverse/makedatacount/MakeDataCountUtil.java
@@ -27,7 +27,7 @@
  * How to Make Your Data Count July 10th, 2018).
  *
  * The recommended starting point to implement Make Data Count is
- * https://github.com/CDLUC3/Make-Data-Count/blob/master/getting-started.md
+ * https://github.com/gdcc/Make-Data-Count/blob/master/getting-started.md
  * which specifically recommends reading the "COUNTER Code of Practice for
  * Research Data" mentioned in the user facing docs.
  *
@@ -35,7 +35,7 @@
  * https://dash.ucmerced.edu/stash/dataset/doi:10.6071/M3RP49
  *
  * For processing logs we could try DASH's
- * https://github.com/CDLUC3/counter-processor
+ * https://github.com/gdcc/counter-processor
  *
  * Next, DataOne implemented it, and you can see an example dataset here:
  * https://search.dataone.org/view/doi:10.5063/F1Z899CZ
diff --git a/src/main/java/edu/harvard/iq/dataverse/metrics/MetricsUtil.java b/src/main/java/edu/harvard/iq/dataverse/metrics/MetricsUtil.java
index 74bb53e1191..7d968e7e5c1 100644
--- a/src/main/java/edu/harvard/iq/dataverse/metrics/MetricsUtil.java
+++ b/src/main/java/edu/harvard/iq/dataverse/metrics/MetricsUtil.java
@@ -1,7 +1,8 @@
 package edu.harvard.iq.dataverse.metrics;
 
 import edu.harvard.iq.dataverse.Dataverse;
-import java.io.StringReader;
+import edu.harvard.iq.dataverse.util.json.JsonUtil;
+
 import java.math.BigDecimal;
 import java.time.LocalDate;
 import java.time.YearMonth;
@@ -17,28 +18,30 @@
 import jakarta.json.JsonArrayBuilder;
 import jakarta.json.JsonObject;
 import jakarta.json.JsonObjectBuilder;
-import jakarta.json.JsonReader;
+import jakarta.json.JsonException;
 import jakarta.ws.rs.BadRequestException;
 
 public class MetricsUtil {
 
     private static final Logger logger = Logger.getLogger(MetricsUtil.class.getCanonicalName());
 
-    public final static String CONTENTTYPE = "contenttype";
-    public final static String COUNT = "count";
-    public final static String CATEGORY = "category";
-    public final static String ID = "id";
-    public final static String PID = "pid";
-    public final static String SUBJECT = "subject";
-    public final static String DATE = "date";
-    public final static String SIZE = "size";
+    public static final String CONTENTTYPE = "contenttype";
+    public static final String COUNT = "count";
+    public static final String CATEGORY = "category";
+    public static final String ID = "id";
+    public static final String PID = "pid";
+    public static final String SUBJECT = "subject";
+    public static final String DATE = "date";
+    public static final String SIZE = "size";
 
-    public static String YEAR_AND_MONTH_PATTERN = "yyyy-MM";
+    public static final String YEAR_AND_MONTH_PATTERN = "yyyy-MM";
 
     public static final String DATA_LOCATION_LOCAL = "local";
     public static final String DATA_LOCATION_REMOTE = "remote";
     public static final String DATA_LOCATION_ALL = "all";
 
+    private MetricsUtil() {}
+
     public static JsonObjectBuilder countToJson(long count) {
         JsonObjectBuilder job = Json.createObjectBuilder();
         job.add(COUNT, count);
@@ -134,8 +137,8 @@ public static JsonArray timeSeriesToJson(List<Object[]> results, boolean isBigDe
     
     public static JsonArray timeSeriesByTypeToJson(List<Object[]> results) {
         JsonArrayBuilder jab = Json.createArrayBuilder();
-        Map<String, Long> totals = new HashMap<String, Long>();
-        Map<String, Long> sizes =  new HashMap<String, Long>();
+        Map<String, Long> totals = new HashMap<>();
+        Map<String, Long> sizes =  new HashMap<>();
         String curDate = (String) results.get(0)[0];
         // Get a list of all the monthly dates from the start until now
         List<String> dates = getDatesFrom(curDate);
@@ -169,7 +172,7 @@ public static JsonArray timeSeriesByTypeToJson(List<Object[]> results) {
     
     public static JsonArray timeSeriesByPIDToJson(List<Object[]> results) {
         JsonArrayBuilder jab = Json.createArrayBuilder();
-        Map<String, Long> totals = new HashMap<String, Long>();
+        Map<String, Long> totals = new HashMap<>();
         String curDate = (String) results.get(0)[0];
         // Get a list of all the monthly dates from the start until now
         List<String> dates = getDatesFrom(curDate);
@@ -200,8 +203,8 @@ public static JsonArray timeSeriesByPIDToJson(List<Object[]> results) {
     
     public static JsonArray timeSeriesByIDAndPIDToJson(List<Object[]> results) {
         JsonArrayBuilder jab = Json.createArrayBuilder();
-        Map<Integer, Long> totals = new HashMap<Integer, Long>();
-        Map<Integer, String> pids = new HashMap<Integer, String>();
+        Map<Integer, Long> totals = new HashMap<>();
+        Map<Integer, String> pids = new HashMap<>();
         String curDate = (String) results.get(0)[0];
         // Get a list of all the monthly dates from the start until now
         List<String> dates = getDatesFrom(curDate);
@@ -238,11 +241,11 @@ public static JsonArray timeSeriesByIDAndPIDToJson(List<Object[]> results) {
 
     /**
      *
-     * @param userInput A year and month in YYYY-MM format.
-     * @return A year and month in YYYY-M     
      * Note that along with sanitization, this checks that the requested month is
      * not after the current one. This will need to be made more robust if we
-     * start writing metrics for farther in the future (e.g. the current year) the current year)
+     * start writing metrics farther in the future (e.g. for the current year).
+     * @param userInput A year and month in YYYY-MM format.
+     * @return A year and month in YYYY-MM format.
      */
     public static String sanitizeYearMonthUserInput(String userInput) throws BadRequestException {
         logger.fine("string from user to sanitize (hopefully YYYY-MM format): " + userInput);
@@ -260,8 +263,7 @@ public static String sanitizeYearMonthUserInput(String userInput) throws BadRequ
             throw new BadRequestException("The requested date is set past the current month.");
         }
 
-        String sanitized = inputLocalDate.format(dateTimeFormatter);
-        return sanitized;
+        return inputLocalDate.format(dateTimeFormatter);
     }
 
     public static String validateDataLocationStringType(String dataLocation) throws BadRequestException {
@@ -279,30 +281,38 @@ public static String getCurrentMonth() {
         return LocalDate.now().format(DateTimeFormatter.ofPattern(MetricsUtil.YEAR_AND_MONTH_PATTERN));
     }
 
+    /**
+     * Parse a String into a JSON object
+     * @param str serialized JSON
+     * @return {@code null} if {@code str} is {@code null}, or the parsed JSON object
+     * @throws JsonException if {@code str} cannot be parsed as a JSON object
+     * @see JsonUtil#getJsonObject(String)
+     */
     public static JsonObject stringToJsonObject(String str) {
         if (str == null) {
             return null;
         }
-        JsonReader jsonReader = Json.createReader(new StringReader(str));
-        JsonObject jo = jsonReader.readObject();
-        jsonReader.close();
 
-        return jo;
+        return JsonUtil.getJsonObject(str);
     }
 
+    /**
+     * Parse a String into a JSON array
+     * @param str serialized JSON
+     * @return {@code null} if {@code str} is {@code null}, or the parsed JSON array
+     * @throws JsonException if {@code str} cannot be parsed as a JSON array
+     * @see JsonUtil#getJsonArray(String)
+     */
     public static JsonArray stringToJsonArray(String str) {
         if (str == null) {
             return null;
         }
-        JsonReader jsonReader = Json.createReader(new StringReader(str));
-        JsonArray ja = jsonReader.readArray();
-        jsonReader.close();
 
-        return ja;
+        return JsonUtil.getJsonArray(str);
     }
 
     public static List<String> getDatesFrom(String startMonth) {
-        List<String> dates = new ArrayList<String>();
+        List<String> dates = new ArrayList<>();
         LocalDate next = LocalDate.parse(startMonth+ "-01").plusMonths(1);
         dates.add(startMonth);
         DateTimeFormatter monthFormat = DateTimeFormatter.ofPattern(YEAR_AND_MONTH_PATTERN);
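The MetricsUtil changes above make the constants final, add a private constructor, and delegate string parsing to JsonUtil. A minimal usage sketch, assuming the class's usual edu.harvard.iq.dataverse.metrics package (the package declaration is not shown in this hunk):

```java
import edu.harvard.iq.dataverse.metrics.MetricsUtil; // assumed package
import jakarta.json.JsonObject;

public class MetricsUtilSketch {
    public static void main(String[] args) {
        // sanitizeYearMonthUserInput() accepts "YYYY-MM" and rejects months in the future.
        String month = MetricsUtil.sanitizeYearMonthUserInput("2023-07");

        // stringToJsonObject() now delegates to JsonUtil and still returns null for null input.
        JsonObject cached = MetricsUtil.stringToJsonObject("{\"count\": 42}");

        System.out.println(month + " -> " + cached.getInt("count"));
    }
}
```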
diff --git a/src/main/java/edu/harvard/iq/dataverse/pidproviders/handle/HandlePidProvider.java b/src/main/java/edu/harvard/iq/dataverse/pidproviders/handle/HandlePidProvider.java
index 2627bc76fd9..9d61663d034 100644
--- a/src/main/java/edu/harvard/iq/dataverse/pidproviders/handle/HandlePidProvider.java
+++ b/src/main/java/edu/harvard/iq/dataverse/pidproviders/handle/HandlePidProvider.java
@@ -120,21 +120,21 @@ public void reRegisterHandle(DvObject dvObject) {
             
             try {
 
-                AdminRecord admin = new AdminRecord(authHandle.getBytes("UTF8"), handlenetIndex,
+                AdminRecord admin = new AdminRecord(authHandle.getBytes(StandardCharsets.UTF_8), handlenetIndex,
                         true, true, true, true, true, true,
                         true, true, true, true, true, true);
 
                 int timestamp = (int) (System.currentTimeMillis() / 1000);
 
-                HandleValue[] val = {new HandleValue(100, "HS_ADMIN".getBytes("UTF8"),
+                HandleValue[] val = {new HandleValue(100, "HS_ADMIN".getBytes(StandardCharsets.UTF_8),
                     Encoder.encodeAdminRecord(admin),
                     HandleValue.TTL_TYPE_RELATIVE, 86400,
-                    timestamp, null, true, true, true, false), new HandleValue(1, "URL".getBytes("UTF8"),
+                    timestamp, null, true, true, true, false), new HandleValue(1, "URL".getBytes(StandardCharsets.UTF_8),
                     datasetUrl.getBytes(),
                     HandleValue.TTL_TYPE_RELATIVE, 86400,
                     timestamp, null, true, true, true, false)};
 
-                ModifyValueRequest req = new ModifyValueRequest(handle.getBytes("UTF8"), val, auth);
+                ModifyValueRequest req = new ModifyValueRequest(handle.getBytes(StandardCharsets.UTF_8), val, auth);
 
                 resolver.traceMessages = true;
                 AbstractResponse response = resolver.processRequest(req);
@@ -168,22 +168,22 @@ public Throwable registerNewHandle(DvObject dvObject) {
 
         try {
 
-            AdminRecord admin = new AdminRecord(authHandle.getBytes("UTF8"), handlenetIndex,
+            AdminRecord admin = new AdminRecord(authHandle.getBytes(StandardCharsets.UTF_8), handlenetIndex,
                     true, true, true, true, true, true,
                     true, true, true, true, true, true);
 
             int timestamp = (int) (System.currentTimeMillis() / 1000);
 
-            HandleValue[] val = {new HandleValue(100, "HS_ADMIN".getBytes("UTF8"),
+            HandleValue[] val = {new HandleValue(100, "HS_ADMIN".getBytes(StandardCharsets.UTF_8),
                 Encoder.encodeAdminRecord(admin),
                 HandleValue.TTL_TYPE_RELATIVE, 86400,
-                timestamp, null, true, true, true, false), new HandleValue(1, "URL".getBytes("UTF8"),
+                timestamp, null, true, true, true, false), new HandleValue(1, "URL".getBytes(StandardCharsets.UTF_8),
                 datasetUrl.getBytes(),
                 HandleValue.TTL_TYPE_RELATIVE, 86400,
                 timestamp, null, true, true, true, false)};
 
             CreateHandleRequest req
-                    = new CreateHandleRequest(handle.getBytes("UTF8"), val, auth);
+                    = new CreateHandleRequest(handle.getBytes(StandardCharsets.UTF_8), val, auth);
 
             resolver.traceMessages = true;
             AbstractResponse response = resolver.processRequest(req);
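The HandlePidProvider hunks above (and several files below) replace charset-name lookups such as getBytes("UTF8") with the StandardCharsets.UTF_8 constant, which produces the same bytes without the checked UnsupportedEncodingException. A small standalone illustration of the pattern, not specific to the handle code:

```java
import java.nio.charset.StandardCharsets;

public class CharsetPattern {
    public static void main(String[] args) throws Exception {
        String handle = "20.500.12345/ABC";

        // Old style: the charset is looked up by name at runtime and the call
        // forces a checked UnsupportedEncodingException, even though UTF-8 always exists.
        byte[] oldStyle = handle.getBytes("UTF8");

        // New style used throughout this PR: compile-time constant, no checked exception.
        byte[] newStyle = handle.getBytes(StandardCharsets.UTF_8);

        System.out.println(java.util.Arrays.equals(oldStyle, newStyle)); // true
    }
}
```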
diff --git a/src/main/java/edu/harvard/iq/dataverse/privateurl/PrivateUrlServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/privateurl/PrivateUrlServiceBean.java
index 9e5879106e4..01710e06f8f 100644
--- a/src/main/java/edu/harvard/iq/dataverse/privateurl/PrivateUrlServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/privateurl/PrivateUrlServiceBean.java
@@ -96,7 +96,7 @@ private RoleAssignment getRoleAssignmentFromPrivateUrlToken(String privateUrlTok
      *
      * @todo This might be a good place for Optional.
      */
-    private RoleAssignment getPrivateUrlRoleAssignmentFromDataset(Dataset dataset) {
+    public RoleAssignment getPrivateUrlRoleAssignmentFromDataset(Dataset dataset) {
         if (dataset == null) {
             return null;
         }
diff --git a/src/main/java/edu/harvard/iq/dataverse/provenance/ProvPopupFragmentBean.java b/src/main/java/edu/harvard/iq/dataverse/provenance/ProvPopupFragmentBean.java
index 6e8a512902a..a8b28d2d79d 100644
--- a/src/main/java/edu/harvard/iq/dataverse/provenance/ProvPopupFragmentBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/provenance/ProvPopupFragmentBean.java
@@ -21,6 +21,7 @@
 import static edu.harvard.iq.dataverse.util.JsfHelper.JH;
 import java.io.OutputStream;
 import java.io.OutputStreamWriter;
+import java.nio.charset.StandardCharsets;
 import java.util.HashMap;
 import java.util.Map;
 import java.util.logging.Level;
@@ -499,7 +500,7 @@ public void showJsonPreviewNewWindow() throws IOException, WrappedResponse {
 
         OutputStream output = ec.getResponseOutputStream();
         
-        OutputStreamWriter osw = new OutputStreamWriter(output, "UTF-8");
+        OutputStreamWriter osw = new OutputStreamWriter(output, StandardCharsets.UTF_8);
         osw.write(provJsonState); //the button calling this will only be rendered if provJsonState exists (e.g. a file is uploaded)
         osw.close();
         fc.responseComplete();
diff --git a/src/main/java/edu/harvard/iq/dataverse/rserve/RemoteDataFrameService.java b/src/main/java/edu/harvard/iq/dataverse/rserve/RemoteDataFrameService.java
index df2e44ecb27..dbcfc039fa1 100644
--- a/src/main/java/edu/harvard/iq/dataverse/rserve/RemoteDataFrameService.java
+++ b/src/main/java/edu/harvard/iq/dataverse/rserve/RemoteDataFrameService.java
@@ -33,6 +33,7 @@
 import java.io.IOException;
 import java.io.InputStream;
 import java.io.OutputStream;
+import java.nio.charset.StandardCharsets;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.HashMap;
@@ -758,7 +759,7 @@ private static String readLocalResource(String path) {
 
         // Try opening a buffered reader stream
         try {
-            resourceAsString = IOUtils.toString(resourceStream, "UTF-8");
+            resourceAsString = IOUtils.toString(resourceStream, StandardCharsets.UTF_8);
             resourceStream.close();
         } catch (IOException ex) {
             logger.warning(String.format("RDATAFileReader: (readLocalResource) resource stream from path \"%s\" was invalid", path));
diff --git a/src/main/java/edu/harvard/iq/dataverse/search/SolrSearchResult.java b/src/main/java/edu/harvard/iq/dataverse/search/SolrSearchResult.java
index e84c8f133da..b40dcd69f3b 100644
--- a/src/main/java/edu/harvard/iq/dataverse/search/SolrSearchResult.java
+++ b/src/main/java/edu/harvard/iq/dataverse/search/SolrSearchResult.java
@@ -534,6 +534,9 @@ public JsonObjectBuilder json(boolean showRelevance, boolean showEntityIds, bool
 				nullSafeJsonBuilder.add("entity_id", this.entityId);
 			}
 		}
+		if (!getPublicationStatuses().isEmpty()) {
+			nullSafeJsonBuilder.add("publicationStatuses", getPublicationStatusesAsJSON());
+		}
 
 		if (this.entity == null) {
 
diff --git a/src/main/java/edu/harvard/iq/dataverse/search/savedsearch/SavedSearchServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/search/savedsearch/SavedSearchServiceBean.java
index 7fc2bdf79a3..1dd89f75a26 100644
--- a/src/main/java/edu/harvard/iq/dataverse/search/savedsearch/SavedSearchServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/search/savedsearch/SavedSearchServiceBean.java
@@ -2,29 +2,28 @@
 
 import edu.harvard.iq.dataverse.Dataset;
 import edu.harvard.iq.dataverse.DatasetLinkingDataverse;
+import edu.harvard.iq.dataverse.DatasetLinkingServiceBean;
 import edu.harvard.iq.dataverse.Dataverse;
 import edu.harvard.iq.dataverse.DataverseLinkingDataverse;
+import edu.harvard.iq.dataverse.DataverseLinkingServiceBean;
 import edu.harvard.iq.dataverse.DvObject;
 import edu.harvard.iq.dataverse.DvObjectServiceBean;
 import edu.harvard.iq.dataverse.EjbDataverseEngine;
 import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser;
 import edu.harvard.iq.dataverse.authorization.users.GuestUser;
 import edu.harvard.iq.dataverse.engine.command.DataverseRequest;
-import edu.harvard.iq.dataverse.search.SearchServiceBean;
-import edu.harvard.iq.dataverse.search.SolrQueryResponse;
-import edu.harvard.iq.dataverse.search.SolrSearchResult;
 import edu.harvard.iq.dataverse.engine.command.exception.CommandException;
+import edu.harvard.iq.dataverse.engine.command.impl.DeleteDatasetLinkingDataverseCommand;
+import edu.harvard.iq.dataverse.engine.command.impl.DeleteDataverseLinkingDataverseCommand;
 import edu.harvard.iq.dataverse.engine.command.impl.LinkDatasetCommand;
 import edu.harvard.iq.dataverse.engine.command.impl.LinkDataverseCommand;
 import edu.harvard.iq.dataverse.search.SearchException;
 import edu.harvard.iq.dataverse.search.SearchFields;
+import edu.harvard.iq.dataverse.search.SearchServiceBean;
+import edu.harvard.iq.dataverse.search.SolrQueryResponse;
+import edu.harvard.iq.dataverse.search.SolrSearchResult;
 import edu.harvard.iq.dataverse.search.SortBy;
 import edu.harvard.iq.dataverse.util.SystemConfig;
-import java.util.ArrayList;
-import java.util.Date;
-import java.util.List;
-import java.util.logging.Level;
-import java.util.logging.Logger;
 import jakarta.ejb.EJB;
 import jakarta.ejb.Schedule;
 import jakarta.ejb.Stateless;
@@ -39,6 +38,12 @@
 import jakarta.persistence.TypedQuery;
 import jakarta.servlet.http.HttpServletRequest;
 
+import java.util.ArrayList;
+import java.util.Date;
+import java.util.List;
+import java.util.logging.Level;
+import java.util.logging.Logger;
+
 @Stateless
 @Named
 public class SavedSearchServiceBean {
@@ -50,6 +55,10 @@ public class SavedSearchServiceBean {
     @EJB
     DvObjectServiceBean dvObjectService;
     @EJB
+    protected DatasetLinkingServiceBean dsLinkingService;
+    @EJB
+    protected DataverseLinkingServiceBean dvLinkingService;
+    @EJB
     EjbDataverseEngine commandEngine;
     @EJB
     SystemConfig systemConfig;
@@ -96,21 +105,25 @@ public SavedSearch add(SavedSearch toPersist) {
         try {
             persisted = em.merge(toPersist);
         } catch (Exception ex) {
-            System.out.println("exeption: " + ex);
+            logger.fine("Failed to add SavedSearch" + ex);
         }
         return persisted;
     }
 
-    public boolean delete(long id) {
+    public boolean delete(long id, boolean unlink) throws SearchException, CommandException {
         SavedSearch doomed = find(id);
         boolean wasDeleted = false;
         if (doomed != null) {
-            System.out.println("deleting saved search id " + doomed.getId());
+            logger.info("Deleting saved search id " + doomed.getId());
+            if(unlink) {
+                DataverseRequest dataverseRequest = new DataverseRequest(doomed.getCreator(), getHttpServletRequest());
+                removeLinks(dataverseRequest, doomed);
+            }
             em.remove(doomed);
             em.flush();
             wasDeleted = true;
         } else {
-            System.out.println("problem deleting saved search id " + id);
+            logger.info("Problem deleting saved search id " + id);
         }
         return wasDeleted;
     }
@@ -240,6 +253,45 @@ public JsonObjectBuilder makeLinksForSingleSavedSearch(DataverseRequest dvReq, S
         return response;
     }
 
+    /**
+     * This method is the reverse of the makeLinksForSingleSavedSearch method.
+     * It removes all Dataset and Dataverse links that match savedSearch's query.
+     * @param dvReq
+     * @param savedSearch
+     * @throws SearchException
+     * @throws CommandException
+     */
+    public void removeLinks(DataverseRequest dvReq, SavedSearch savedSearch) throws SearchException, CommandException {
+        logger.fine("UNLINK SAVED SEARCH (" + savedSearch.getId() + ") START search and unlink process");
+        Date start = new Date();
+        Dataverse linkingDataverse = savedSearch.getDefinitionPoint();
+
+        SolrQueryResponse queryResponse = findHits(savedSearch);
+        for (SolrSearchResult solrSearchResult : queryResponse.getSolrSearchResults()) {
+
+            DvObject dvObjectThatDefinitionPointWillLinkTo = dvObjectService.findDvObject(solrSearchResult.getEntityId());
+            if (dvObjectThatDefinitionPointWillLinkTo == null) {
+                continue;
+            }
+
+            if (dvObjectThatDefinitionPointWillLinkTo.isInstanceofDataverse()) {
+                Dataverse linkedDataverse = (Dataverse) dvObjectThatDefinitionPointWillLinkTo;
+                DataverseLinkingDataverse dvld = dvLinkingService.findDataverseLinkingDataverse(linkedDataverse.getId(), linkingDataverse.getId());
+                if(dvld != null) {
+                    Dataverse dv = commandEngine.submitInNewTransaction(new DeleteDataverseLinkingDataverseCommand(dvReq, linkingDataverse, dvld, true));
+                }
+            } else if (dvObjectThatDefinitionPointWillLinkTo.isInstanceofDataset()) {
+                Dataset linkedDataset = (Dataset) dvObjectThatDefinitionPointWillLinkTo;
+                DatasetLinkingDataverse dsld = dsLinkingService.findDatasetLinkingDataverse(linkedDataset.getId(), linkingDataverse.getId());
+                if(dsld != null) {
+                    Dataset ds = commandEngine.submitInNewTransaction(new DeleteDatasetLinkingDataverseCommand(dvReq, linkedDataset, dsld, true));
+                }
+            }
+        }
+
+        logger.fine("UNLINK SAVED SEARCH (" + savedSearch.getId() + ") total time in ms: " + (new Date().getTime() - start.getTime()));
+    }
+
     private SolrQueryResponse findHits(SavedSearch savedSearch) throws SearchException {
         String sortField = SearchFields.TYPE; // first return dataverses, then datasets
         String sortOrder = SortBy.DESCENDING;
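The new delete(long id, boolean unlink) overload and removeLinks(...) above undo the dataverse and dataset links created when a saved search was executed. A minimal sketch of how a caller might use it, assuming an injected SavedSearchServiceBean; this is a hypothetical endpoint, and the real admin API wiring may differ:

```java
import edu.harvard.iq.dataverse.search.savedsearch.SavedSearchServiceBean;
import jakarta.ejb.EJB;
import jakarta.ws.rs.DELETE;
import jakarta.ws.rs.Path;
import jakarta.ws.rs.PathParam;
import jakarta.ws.rs.QueryParam;

@Path("admin/savedsearches")
public class SavedSearchSketchResource {

    @EJB
    SavedSearchServiceBean savedSearchSvc;

    @DELETE
    @Path("{id}")
    public String delete(@PathParam("id") long id, @QueryParam("unlink") boolean unlink) throws Exception {
        // With unlink=true, delete() first removes the links the saved search created,
        // then deletes the saved search itself.
        boolean deleted = savedSearchSvc.delete(id, unlink);
        return deleted ? "Deleted saved search " + id : "Saved search " + id + " not found";
    }
}
```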
diff --git a/src/main/java/edu/harvard/iq/dataverse/util/StringUtil.java b/src/main/java/edu/harvard/iq/dataverse/util/StringUtil.java
index 137ae21d793..56f85436773 100644
--- a/src/main/java/edu/harvard/iq/dataverse/util/StringUtil.java
+++ b/src/main/java/edu/harvard/iq/dataverse/util/StringUtil.java
@@ -1,7 +1,7 @@
 package edu.harvard.iq.dataverse.util;
 
 import edu.harvard.iq.dataverse.authorization.providers.oauth2.OAuth2LoginBackingBean;
-import java.io.UnsupportedEncodingException;
+import java.nio.charset.StandardCharsets;
 import java.nio.ByteBuffer;
 import java.security.InvalidAlgorithmParameterException;
 import java.security.InvalidKeyException;
@@ -152,7 +152,7 @@ public static String encrypt(String value, String password ) {
                     .replaceAll("/", "_");
             
         } catch (  InvalidKeyException | NoSuchAlgorithmException | BadPaddingException
-                  | IllegalBlockSizeException | NoSuchPaddingException | UnsupportedEncodingException | InvalidAlgorithmParameterException ex) {
+                  | IllegalBlockSizeException | NoSuchPaddingException | InvalidAlgorithmParameterException ex) {
             Logger.getLogger(OAuth2LoginBackingBean.class.getName()).log(Level.SEVERE, null, ex);
             throw new RuntimeException(ex);
         }
@@ -173,7 +173,7 @@ public static String decrypt(String value, String password ) {
             return new String(decrypted);
             
         } catch ( InvalidKeyException | NoSuchAlgorithmException | BadPaddingException
-                  | IllegalBlockSizeException | NoSuchPaddingException | UnsupportedEncodingException | InvalidAlgorithmParameterException ex) {
+                  | IllegalBlockSizeException | NoSuchPaddingException | InvalidAlgorithmParameterException ex) {
             Logger.getLogger(OAuth2LoginBackingBean.class.getName()).log(Level.SEVERE, null, ex);
             throw new RuntimeException(ex);
         }
@@ -209,8 +209,8 @@ public static String sanitizeFileDirectory(String value, boolean aggressively){
     }
     
     
-    private static SecretKeySpec generateKeyFromString(final String secKey) throws UnsupportedEncodingException, NoSuchAlgorithmException {
-        byte[] key = (secKey).getBytes("UTF-8");
+    private static SecretKeySpec generateKeyFromString(final String secKey) throws NoSuchAlgorithmException {
+        byte[] key = (secKey).getBytes(StandardCharsets.UTF_8);
         MessageDigest sha = MessageDigest.getInstance("SHA-1");
         key = sha.digest(key);
         key = Arrays.copyOf(key, 16); // use only first 128 bits
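StringUtil.generateKeyFromString() above now derives its AES key without the checked UnsupportedEncodingException. A standalone sketch of the same derivation (SHA-1 over the UTF-8 passphrase bytes, truncated to the first 128 bits), shown only to illustrate the change; the passphrase value is made up:

```java
import java.nio.charset.StandardCharsets;
import java.security.MessageDigest;
import java.util.Arrays;
import javax.crypto.spec.SecretKeySpec;

public class KeyDerivationSketch {
    public static void main(String[] args) throws Exception {
        byte[] key = "correct horse battery staple".getBytes(StandardCharsets.UTF_8);
        key = MessageDigest.getInstance("SHA-1").digest(key);
        key = Arrays.copyOf(key, 16); // only the first 128 bits are used
        SecretKeySpec spec = new SecretKeySpec(key, "AES");
        System.out.println("AES key length: " + spec.getEncoded().length + " bytes");
    }
}
```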
diff --git a/src/main/java/edu/harvard/iq/dataverse/util/SystemConfig.java b/src/main/java/edu/harvard/iq/dataverse/util/SystemConfig.java
index f9801419e47..5cc28e4b225 100644
--- a/src/main/java/edu/harvard/iq/dataverse/util/SystemConfig.java
+++ b/src/main/java/edu/harvard/iq/dataverse/util/SystemConfig.java
@@ -986,7 +986,7 @@ public boolean isFilePIDsEnabledForCollection(Dataverse collection) {
         Dataverse thisCollection = collection; 
         
         // If neither enabled nor disabled specifically for this collection,
-        // the parent collection setting is inhereted (recursively): 
+        // the parent collection setting is inherited (recursively): 
         while (thisCollection.getFilePIDsEnabled() == null) {
             if (thisCollection.getOwner() == null) {
                 // We've reached the root collection, and file PIDs registration
@@ -1002,8 +1002,6 @@ public boolean isFilePIDsEnabledForCollection(Dataverse collection) {
         // takes precedent:
         return thisCollection.getFilePIDsEnabled();
     }
-    
-
 
     public String getMDCLogPath() {
         String mDCLogPath = settingsService.getValueForKey(SettingsServiceBean.Key.MDCLogPath, null);
diff --git a/src/main/java/edu/harvard/iq/dataverse/util/UrlSignerUtil.java b/src/main/java/edu/harvard/iq/dataverse/util/UrlSignerUtil.java
index 29c4e8a6fb9..18ea3771301 100644
--- a/src/main/java/edu/harvard/iq/dataverse/util/UrlSignerUtil.java
+++ b/src/main/java/edu/harvard/iq/dataverse/util/UrlSignerUtil.java
@@ -2,7 +2,7 @@
 
 import java.net.MalformedURLException;
 import java.net.URL;
-import java.nio.charset.Charset;
+import java.nio.charset.StandardCharsets;
 import java.util.List;
 import java.util.logging.Level;
 import java.util.logging.Logger;
@@ -96,7 +96,7 @@ public static boolean isValidUrl(String signedUrl, String user, String method, S
         boolean valid = true;
         try {
             URL url = new URL(signedUrl);
-            List<NameValuePair> params = URLEncodedUtils.parse(url.getQuery(), Charset.forName("UTF-8"));
+            List<NameValuePair> params = URLEncodedUtils.parse(url.getQuery(), StandardCharsets.UTF_8);
             String hash = null;
             String dateString = null;
             String allowedMethod = null;
@@ -156,7 +156,7 @@ public static boolean isValidUrl(String signedUrl, String user, String method, S
     public static boolean hasToken(String urlString) {
         try {
             URL url = new URL(urlString);
-            List<NameValuePair> params = URLEncodedUtils.parse(url.getQuery(), Charset.forName("UTF-8"));
+            List<NameValuePair> params = URLEncodedUtils.parse(url.getQuery(), StandardCharsets.UTF_8);
             for (NameValuePair nvp : params) {
                 if (nvp.getName().equals(SIGNED_URL_TOKEN)) {
                     return true;
diff --git a/src/main/java/edu/harvard/iq/dataverse/util/bagit/BagGenerator.java b/src/main/java/edu/harvard/iq/dataverse/util/bagit/BagGenerator.java
index b7c44014b80..e47426149f9 100644
--- a/src/main/java/edu/harvard/iq/dataverse/util/bagit/BagGenerator.java
+++ b/src/main/java/edu/harvard/iq/dataverse/util/bagit/BagGenerator.java
@@ -9,10 +9,10 @@
 import java.io.InputStreamReader;
 import java.io.OutputStream;
 import java.io.PrintWriter;
-import java.io.UnsupportedEncodingException;
 import java.net.MalformedURLException;
 import java.net.URI;
 import java.net.URISyntaxException;
+import java.nio.charset.StandardCharsets;
 import java.nio.file.Paths;
 import java.security.KeyManagementException;
 import java.security.KeyStoreException;
@@ -686,12 +686,7 @@ private void createFileFromString(final String relPath, final String content)
         archiveEntry.setMethod(ZipEntry.DEFLATED);
         InputStreamSupplier supp = new InputStreamSupplier() {
             public InputStream get() {
-                try {
-                    return new ByteArrayInputStream(content.getBytes("UTF-8"));
-                } catch (UnsupportedEncodingException e) {
-                    e.printStackTrace();
-                }
-                return null;
+                return new ByteArrayInputStream(content.getBytes(StandardCharsets.UTF_8));
             }
         };
 
diff --git a/src/main/java/edu/harvard/iq/dataverse/util/bagit/OREMap.java b/src/main/java/edu/harvard/iq/dataverse/util/bagit/OREMap.java
index 84bc7834ab9..60ab9407269 100644
--- a/src/main/java/edu/harvard/iq/dataverse/util/bagit/OREMap.java
+++ b/src/main/java/edu/harvard/iq/dataverse/util/bagit/OREMap.java
@@ -11,6 +11,7 @@
 import edu.harvard.iq.dataverse.util.json.JsonPrinter;
 
 import java.io.OutputStream;
+import java.nio.charset.StandardCharsets;
 import java.time.LocalDate;
 import java.util.List;
 import java.util.Map;
@@ -68,7 +69,7 @@ public OREMap(DatasetVersion dv, boolean exclude) {
     }
 
     public void writeOREMap(OutputStream outputStream) throws Exception {
-        outputStream.write(getOREMap().toString().getBytes("UTF8"));
+        outputStream.write(getOREMap().toString().getBytes(StandardCharsets.UTF_8));
         outputStream.flush();
     }
 
diff --git a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java
index c72dfc1d127..adb7cf98975 100644
--- a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java
+++ b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java
@@ -79,7 +79,7 @@ public static void injectSettingsService(SettingsServiceBean ssb, DatasetFieldSe
     }
 
     public JsonPrinter() {
-   
+
     }
 
     public static final BriefJsonPrinter brief = new BriefJsonPrinter();
@@ -122,7 +122,7 @@ public static JsonObjectBuilder json(AuthenticatedUser authenticatedUser) {
             .add("authenticationProviderId", authenticatedUser.getAuthenticatedUserLookup().getAuthenticationProviderId());
         return builder;
     }
-    
+
     public static JsonObjectBuilder json(RoleAssignment ra) {
         return jsonObjectBuilder()
                 .add("id", ra.getId())
@@ -147,7 +147,7 @@ public static JsonObjectBuilder json(DatasetLock lock) {
                 .add("dataset", lock.getDataset().getGlobalId().asString())
                 .add("message", lock.getInfo());
     }
-    
+
     public static JsonObjectBuilder json( RoleAssigneeDisplayInfo d ) {
         return jsonObjectBuilder()
                 .add("title", d.getTitle())
@@ -171,17 +171,17 @@ public static JsonObjectBuilder json(IpGroup grp) {
                 .add("id", grp.getId() )
                 .add("name", grp.getDisplayName() )
                 .add("description", grp.getDescription() );
-       
+
         if ( ! singles.isEmpty() ) {
             bld.add("addresses", asJsonArray(singles) );
         }
-        
+
         if ( ! ranges.isEmpty() ) {
             JsonArrayBuilder rangesBld = Json.createArrayBuilder();
             ranges.forEach( r -> rangesBld.add( Json.createArrayBuilder().add(r.get(0)).add(r.get(1))) );
             bld.add("ranges", rangesBld );
         }
-        
+
         return bld;
     }
 
@@ -192,7 +192,7 @@ public static JsonObjectBuilder json(ShibGroup grp) {
                 .add("pattern", grp.getPattern())
                 .add("id", grp.getId());
     }
-    
+
     public static JsonObjectBuilder json(MailDomainGroup grp) {
         JsonObjectBuilder bld = jsonObjectBuilder()
             .add("alias", grp.getPersistedGroupAlias() )
@@ -235,14 +235,14 @@ public static JsonObjectBuilder json(DataverseRole role) {
 
         return bld;
     }
-    
+
     public static JsonObjectBuilder json(Workflow wf){
         JsonObjectBuilder bld = jsonObjectBuilder();
         bld.add("name", wf.getName());
         if ( wf.getId() != null ) {
             bld.add("id", wf.getId());
         }
-        
+
         if ( wf.getSteps()!=null && !wf.getSteps().isEmpty()) {
             JsonArrayBuilder arr = Json.createArrayBuilder();
             for ( WorkflowStepData stp : wf.getSteps() ) {
@@ -253,10 +253,10 @@ public static JsonObjectBuilder json(Workflow wf){
             }
             bld.add("steps", arr );
         }
-        
+
         return bld;
     }
-    
+
     public static JsonObjectBuilder json(Dataverse dv) {
         return json(dv, false, false);
     }
@@ -268,7 +268,7 @@ public static JsonObjectBuilder json(Dataverse dv, Boolean hideEmail, Boolean re
                 .add("alias", dv.getAlias())
                 .add("name", dv.getName())
                 .add("affiliation", dv.getAffiliation());
-        if(!hideEmail) { 
+        if(!hideEmail) {
             bld.add("dataverseContacts", JsonPrinter.json(dv.getDataverseContacts()));
         }
         if (returnOwners){
@@ -312,11 +312,11 @@ public static JsonArrayBuilder json(List<DataverseContact> dataverseContacts) {
         }
         return jsonArrayOfContacts;
     }
-    
+
     public static JsonObjectBuilder getOwnersFromDvObject(DvObject dvObject){
         return getOwnersFromDvObject(dvObject, null);
     }
-    
+
     public static JsonObjectBuilder getOwnersFromDvObject(DvObject dvObject, DatasetVersion dsv) {
         List <DvObject> ownerList = new ArrayList();
         dvObject = dvObject.getOwner(); // We're going to ignore the object itself
@@ -324,7 +324,7 @@ public static JsonObjectBuilder getOwnersFromDvObject(DvObject dvObject, Dataset
         while (dvObject != null) {
             ownerList.add(0, dvObject);
             dvObject = dvObject.getOwner();
-        } 
+        }
         //then work "inside out"
         JsonObjectBuilder saved = null;
         for (DvObject dvo : ownerList) {
@@ -332,7 +332,7 @@ public static JsonObjectBuilder getOwnersFromDvObject(DvObject dvObject, Dataset
         }
         return saved;
     }
-    
+
     private static JsonObjectBuilder addEmbeddedOwnerObject(DvObject dvo, JsonObjectBuilder isPartOf, DatasetVersion dsv ) {
         JsonObjectBuilder ownerObject = jsonObjectBuilder();
 
@@ -353,16 +353,16 @@ private static JsonObjectBuilder addEmbeddedOwnerObject(DvObject dvo, JsonObject
                ownerObject.add("version", versionString);
             }
         }
-        
+
         ownerObject.add("displayName", dvo.getDisplayName());
-        
+
         if (isPartOf != null) {
             ownerObject.add("isPartOf", isPartOf);
         }
-        
+
         return ownerObject;
     }
-    
+
     public static JsonObjectBuilder json( DataverseTheme theme ) {
         final NullSafeJsonBuilder baseObject = jsonObjectBuilder()
                 .add("id", theme.getId() )
@@ -385,7 +385,7 @@ public static JsonObjectBuilder json(BuiltinUser user) {
                 .add("id", user.getId())
                 .add("userName", user.getUserName());
     }
-    
+
     public static JsonObjectBuilder json(Dataset ds){
        return json(ds, false);
     }
@@ -421,7 +421,7 @@ public static JsonObjectBuilder json(DatasetVersion dsv, boolean includeFiles) {
         return json(dsv, null, includeFiles, false);
     }
 
-    public static JsonObjectBuilder json(DatasetVersion dsv, List<String> anonymizedFieldTypeNamesList, 
+    public static JsonObjectBuilder json(DatasetVersion dsv, List<String> anonymizedFieldTypeNamesList,
         boolean includeFiles, boolean returnOwners) {
         Dataset dataset = dsv.getDataset();
         JsonObjectBuilder bld = jsonObjectBuilder()
@@ -471,7 +471,7 @@ public static JsonObjectBuilder json(DatasetVersion dsv, List<String> anonymized
         bld.add("metadataBlocks", (anonymizedFieldTypeNamesList != null) ?
                 jsonByBlocks(dsv.getDatasetFields(), anonymizedFieldTypeNamesList)
                 : jsonByBlocks(dsv.getDatasetFields())
-        );       
+        );
         if(returnOwners){
             bld.add("isPartOf", getOwnersFromDvObject(dataset));
         }
@@ -483,19 +483,19 @@ public static JsonObjectBuilder json(DatasetVersion dsv, List<String> anonymized
     }
 
     public static JsonObjectBuilder jsonDataFileList(List<DataFile> dataFiles){
-    
+
         if (dataFiles==null){
             throw new NullPointerException("dataFiles cannot be null");
         }
-        
+
         JsonObjectBuilder bld = jsonObjectBuilder();
-        
-        
+
+
         List<FileMetadata> dataFileList = dataFiles.stream()
                                     .map(x -> x.getFileMetadata())
                                     .collect(Collectors.toList());
 
-        
+
         bld.add("files", jsonFileMetadatas(dataFileList));
 
         return bld;
@@ -584,7 +584,7 @@ public static JsonObjectBuilder json(MetadataBlock block, List<DatasetField> fie
 
         blockBld.add("displayName", block.getDisplayName());
         blockBld.add("name", block.getName());
-        
+
         final JsonArrayBuilder fieldsArray = Json.createArrayBuilder();
         Map<Long, JsonObject> cvocMap = (datasetFieldService==null) ? new HashMap<Long, JsonObject>() :datasetFieldService.getCVocConf(true);
         DatasetFieldWalker.walk(fields, settingsService, cvocMap, new DatasetFieldsToJson(fieldsArray, anonymizedFieldTypeNamesList));
@@ -632,37 +632,40 @@ public static JsonObjectBuilder json(MetadataBlock metadataBlock) {
     }
 
     public static JsonObjectBuilder json(MetadataBlock metadataBlock, boolean printOnlyDisplayedOnCreateDatasetFieldTypes, Dataverse ownerDataverse) {
-        JsonObjectBuilder jsonObjectBuilder = jsonObjectBuilder();
-        jsonObjectBuilder.add("id", metadataBlock.getId());
-        jsonObjectBuilder.add("name", metadataBlock.getName());
-        jsonObjectBuilder.add("displayName", metadataBlock.getDisplayName());
-        jsonObjectBuilder.add("displayOnCreate", metadataBlock.isDisplayOnCreate());
-
-        JsonObjectBuilder fieldsBuilder = Json.createObjectBuilder();
-        Set<DatasetFieldType> datasetFieldTypes = new TreeSet<>(metadataBlock.getDatasetFieldTypes());
-
-        for (DatasetFieldType datasetFieldType : datasetFieldTypes) {
-            Long datasetFieldTypeId = datasetFieldType.getId();
-            boolean requiredAsInputLevelInOwnerDataverse = ownerDataverse != null && ownerDataverse.isDatasetFieldTypeRequiredAsInputLevel(datasetFieldTypeId);
-            boolean includedAsInputLevelInOwnerDataverse = ownerDataverse != null && ownerDataverse.isDatasetFieldTypeIncludedAsInputLevel(datasetFieldTypeId);
-            boolean isNotInputLevelInOwnerDataverse = ownerDataverse != null && !ownerDataverse.isDatasetFieldTypeInInputLevels(datasetFieldTypeId);
+        JsonObjectBuilder jsonObjectBuilder = jsonObjectBuilder()
+                .add("id", metadataBlock.getId())
+                .add("name", metadataBlock.getName())
+                .add("displayName", metadataBlock.getDisplayName())
+                .add("displayOnCreate", metadataBlock.isDisplayOnCreate());
 
-            DatasetFieldType parentDatasetFieldType = datasetFieldType.getParentDatasetFieldType();
-            boolean isRequired = parentDatasetFieldType == null ? datasetFieldType.isRequired() : parentDatasetFieldType.isRequired();
+        Set<DatasetFieldType> datasetFieldTypes;
 
-            boolean displayCondition = printOnlyDisplayedOnCreateDatasetFieldTypes
-                    ? (datasetFieldType.isDisplayOnCreate() || isRequired || requiredAsInputLevelInOwnerDataverse)
-                    : ownerDataverse == null || includedAsInputLevelInOwnerDataverse || isNotInputLevelInOwnerDataverse;
+        if (ownerDataverse != null) {
+            datasetFieldTypes = new TreeSet<>(datasetFieldService.findAllInMetadataBlockAndDataverse(
+                    metadataBlock, ownerDataverse, printOnlyDisplayedOnCreateDatasetFieldTypes));
+        } else {
+            datasetFieldTypes = printOnlyDisplayedOnCreateDatasetFieldTypes
+                    ? new TreeSet<>(datasetFieldService.findAllDisplayedOnCreateInMetadataBlock(metadataBlock))
+                    : new TreeSet<>(metadataBlock.getDatasetFieldTypes());
+        }
 
-            if (displayCondition) {
-                fieldsBuilder.add(datasetFieldType.getName(), json(datasetFieldType, ownerDataverse));
-            }
+        JsonObjectBuilder fieldsBuilder = Json.createObjectBuilder();
+        for (DatasetFieldType datasetFieldType : datasetFieldTypes) {
+            fieldsBuilder.add(datasetFieldType.getName(), json(datasetFieldType, ownerDataverse));
         }
 
         jsonObjectBuilder.add("fields", fieldsBuilder);
         return jsonObjectBuilder;
     }
 
+    public static JsonArrayBuilder jsonDatasetFieldTypes(List<DatasetFieldType> fields) {
+        JsonArrayBuilder fieldsJson = Json.createArrayBuilder();
+        for (DatasetFieldType field : fields) {
+            fieldsJson.add(JsonPrinter.json(field));
+        }
+        return fieldsJson;
+    }
+
     public static JsonObjectBuilder json(DatasetFieldType fld) {
         return json(fld, null);
     }
@@ -705,7 +708,7 @@ public static JsonObjectBuilder json(DatasetFieldType fld, Dataverse ownerDatave
 
         return fieldsBld;
     }
-    
+
     public static JsonObjectBuilder json(FileMetadata fmd){
         return json(fmd, false, false);
     }
@@ -751,11 +754,11 @@ public static JsonObjectBuilder json(AuxiliaryFile auxFile) {
     public static JsonObjectBuilder json(DataFile df) {
         return JsonPrinter.json(df, null, false);
     }
-    
+
     public static JsonObjectBuilder json(DataFile df, FileMetadata fileMetadata, boolean forExportDataProvider){
         return json(df, fileMetadata, forExportDataProvider, false);
     }
-    
+
     public static JsonObjectBuilder json(DataFile df, FileMetadata fileMetadata, boolean forExportDataProvider, boolean returnOwners) {
         // File names are no longer stored in the DataFile entity; 
         // (they are instead in the FileMetadata (as "labels") - this way 
@@ -766,13 +769,13 @@ public static JsonObjectBuilder json(DataFile df, FileMetadata fileMetadata, boo
         // *correct* file name - i.e., that it comes from the right version. 
         // (TODO...? L.A. 4.5, Aug 7 2016)
         String fileName = null;
-        
+
         if (fileMetadata == null){
             // Note that this may not necessarily grab the file metadata from the 
             // version *you want*! (L.A.)
             fileMetadata = df.getFileMetadata();
         }
-         
+
         fileName = fileMetadata.getLabel();
         GlobalId filePid = df.getGlobalId();
         String pidURL = (filePid!=null)? filePid.asURL(): null;
@@ -839,7 +842,7 @@ public static JsonObjectBuilder json(DataFile df, FileMetadata fileMetadata, boo
         }
         return builder;
     }
-    
+
     //Started from https://github.com/RENCI-NRIG/dataverse/, i.e. https://github.com/RENCI-NRIG/dataverse/commit/2b5a1225b42cf1caba85e18abfeb952171c6754a
     public static JsonArrayBuilder jsonDT(List<DataTable> ldt) {
         JsonArrayBuilder ldtArr = Json.createArrayBuilder();
@@ -880,8 +883,8 @@ public static JsonObjectBuilder json(DataVariable dv) {
             .add("variableFormatType", dv.getType().name()) // varFormat
             .add("formatCategory", dv.getFormatCategory())
             .add("format", dv.getFormat())
-            .add("isOrderedCategorical", dv.isOrderedCategorical()) 
-            .add("fileOrder", dv.getFileOrder()) 
+            .add("isOrderedCategorical", dv.isOrderedCategorical())
+            .add("fileOrder", dv.getFileOrder())
             .add("UNF",dv.getUnf())
             .add("fileStartPosition", dv.getFileStartPosition())
             .add("fileEndPosition", dv.getFileEndPosition())
@@ -909,7 +912,7 @@ private static JsonArrayBuilder jsonInvalidRanges(Collection<VariableRange> inva
             .add("hasEndValueType", vr.getEndValueType()!=null)
             .add("endValueTypeMax", vr.isEndValueTypeMax())
             .add("endValueTypeMaxExcl", vr.isEndValueTypeMaxExcl());
-            
+
             invRanges.add(job);
         }
         return invRanges;
@@ -941,7 +944,7 @@ private static JsonArrayBuilder jsonCatStat(Collection<VariableCategory> catStat
         }
         return catArr;
     }
-    
+
     private static JsonArrayBuilder jsonVarGroup(List<VarGroup> varGroups) {
         JsonArrayBuilder vgArr = Json.createArrayBuilder();
         for (VarGroup vg : varGroups) {
@@ -955,7 +958,7 @@ private static JsonArrayBuilder jsonVarGroup(List<VarGroup> varGroups) {
         }
         return vgArr;
     }
-    
+
     private static JsonArrayBuilder jsonVarMetadata(Collection<VariableMetadata> varMetadatas) {
         JsonArrayBuilder vmArr = Json.createArrayBuilder();
         for (VariableMetadata vm : varMetadatas) {
@@ -976,7 +979,7 @@ private static JsonArrayBuilder jsonVarMetadata(Collection<VariableMetadata> var
         }
         return vmArr;
     }
-    
+
     private static JsonArrayBuilder json(Collection<CategoryMetadata> categoriesMetadata) {
         JsonArrayBuilder cmArr = Json.createArrayBuilder();
         for(CategoryMetadata cm: categoriesMetadata) {
@@ -990,9 +993,9 @@ private static JsonArrayBuilder json(Collection<CategoryMetadata> categoriesMeta
 
     public static JsonObjectBuilder json(HarvestingClient harvestingClient) {
         if (harvestingClient == null) {
-            return null; 
+            return null;
         }
-        
+
         return jsonObjectBuilder().add("nickName", harvestingClient.getName()).
                 add("dataverseAlias", harvestingClient.getDataverse().getAlias()).
                 add("type", harvestingClient.getHarvestType()).
@@ -1014,7 +1017,7 @@ public static JsonObjectBuilder json(HarvestingClient harvestingClient) {
                 add("lastDatasetsDeleted", harvestingClient.getLastDeletedDatasetCount()). // == null ? "N/A" : harvestingClient.getLastDeletedDatasetCount().toString()).
                 add("lastDatasetsFailed", harvestingClient.getLastFailedDatasetCount()); // == null ? "N/A" : harvestingClient.getLastFailedDatasetCount().toString());
     }
-    
+
     public static String format(Date d) {
         return (d == null) ? null : Util.getDateTimeFormat().format(d);
     }
@@ -1051,7 +1054,7 @@ public static JsonArrayBuilder getTabularFileTags(DataFile df) {
         }
         return tabularTags;
     }
-    
+
     private static class DatasetFieldsToJson implements DatasetFieldWalker.Listener {
 
         Deque<JsonObjectBuilder> objectStack = new LinkedList<>();
@@ -1187,11 +1190,20 @@ public static JsonObjectBuilder json( ExplicitGroup eg ) {
                     .add("displayName", eg.getDisplayName())
                     .add("containedRoleAssignees", ras);
     }
-    
-    public static JsonObjectBuilder json( DataverseFacet aFacet ) {
+
+    public static JsonArrayBuilder jsonDataverseFacets(List<DataverseFacet> dataverseFacets) {
+        JsonArrayBuilder dataverseFacetsJson = Json.createArrayBuilder();
+        for(DataverseFacet facet: dataverseFacets) {
+            dataverseFacetsJson.add(json(facet));
+        }
+        return dataverseFacetsJson;
+    }
+
+    public static JsonObjectBuilder json(DataverseFacet aFacet) {
         return jsonObjectBuilder()
                     .add("id", String.valueOf(aFacet.getId())) // TODO should just be id I think
-                    .add("name", aFacet.getDatasetFieldType().getDisplayName());
+                    .add("displayName", aFacet.getDatasetFieldType().getDisplayName())
+                    .add("name", aFacet.getDatasetFieldType().getName());
     }
 
     public static JsonObjectBuilder json(Embargo embargo) {
@@ -1329,7 +1341,7 @@ public static JsonObjectBuilder getChecksumTypeAndValue(DataFile.ChecksumType ch
             return null;
         }
     }
-    
+
     /**
      * Takes a map, returns a Json object for this map.
      * If map is {@code null}, returns {@code null}.
@@ -1384,4 +1396,20 @@ public static JsonArrayBuilder jsonDataverseFieldTypeInputLevels(List<DataverseF
         }
         return jsonArrayOfInputLevels;
     }
+
+    public static JsonArrayBuilder jsonDataverseInputLevels(List<DataverseFieldTypeInputLevel> inputLevels) {
+        JsonArrayBuilder inputLevelsArrayBuilder = Json.createArrayBuilder();
+        for (DataverseFieldTypeInputLevel inputLevel : inputLevels) {
+            inputLevelsArrayBuilder.add(jsonDataverseInputLevel(inputLevel));
+        }
+        return inputLevelsArrayBuilder;
+    }
+
+    private static JsonObjectBuilder jsonDataverseInputLevel(DataverseFieldTypeInputLevel inputLevel) {
+        JsonObjectBuilder jsonObjectBuilder = Json.createObjectBuilder();
+        jsonObjectBuilder.add("datasetFieldTypeName", inputLevel.getDatasetFieldType().getName());
+        jsonObjectBuilder.add("required", inputLevel.isRequired());
+        jsonObjectBuilder.add("include", inputLevel.isInclude());
+        return jsonObjectBuilder;
+    }
 }
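The JsonPrinter hunks above change what the facet and input-level printers emit: json(DataverseFacet) now returns the machine-readable field name alongside a displayName, and jsonDataverseInputLevels() emits one object per configured input level. A small sketch of the resulting shapes, with illustrative field values only:

```java
import jakarta.json.Json;
import jakarta.json.JsonObject;

public class JsonPrinterShapes {
    public static void main(String[] args) {
        // Shape emitted by json(DataverseFacet) after this change.
        JsonObject facet = Json.createObjectBuilder()
                .add("id", "7")
                .add("displayName", "Author Name")
                .add("name", "authorName")
                .build();

        // Shape emitted per entry by jsonDataverseInputLevels().
        JsonObject inputLevel = Json.createObjectBuilder()
                .add("datasetFieldTypeName", "subject")
                .add("required", true)
                .add("include", true)
                .build();

        System.out.println(facet + "\n" + inputLevel);
    }
}
```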
diff --git a/src/main/java/edu/harvard/iq/dataverse/validation/JSONDataValidation.java b/src/main/java/edu/harvard/iq/dataverse/validation/JSONDataValidation.java
new file mode 100644
index 00000000000..fb19a14e7de
--- /dev/null
+++ b/src/main/java/edu/harvard/iq/dataverse/validation/JSONDataValidation.java
@@ -0,0 +1,190 @@
+package edu.harvard.iq.dataverse.validation;
+
+import com.mashape.unirest.http.JsonNode;
+import edu.harvard.iq.dataverse.DatasetFieldServiceBean;
+import edu.harvard.iq.dataverse.DatasetFieldType;
+import edu.harvard.iq.dataverse.util.BundleUtil;
+import jakarta.enterprise.inject.spi.CDI;
+import org.everit.json.schema.Schema;
+import org.everit.json.schema.ValidationException;
+import org.json.JSONArray;
+
+import java.util.*;
+import java.util.logging.Logger;
+import java.util.stream.Collectors;
+
+public class JSONDataValidation {
+    private static final Logger logger = Logger.getLogger(JSONDataValidation.class.getCanonicalName());
+    private static DatasetFieldServiceBean datasetFieldService = null;
+
+    /**
+     * Validate a JSON document against a schema plus Dataverse-specific field rules.
+     * @param schema Schema defining the JSON objects to be validated
+     * @param schemaChildMap for each compound field type, its "required" and "allowed" child field names
+     * @param jsonInput JSON string to validate against the schema
+     * @throws ValidationException if the JSON does not conform to the schema or the field rules
+     */
+    public static void validate(Schema schema, Map<String, Map<String, List<String>>> schemaChildMap, String jsonInput) throws ValidationException {
+        if (datasetFieldService == null) {
+            datasetFieldService = CDI.current().select(DatasetFieldServiceBean.class).get();
+        }
+        JsonNode node = new JsonNode(jsonInput);
+        if (node.isArray()) {
+            JSONArray arrayNode = node.getArray();
+            validateObject(schema, schemaChildMap, "root", arrayNode.toList());
+        } else {
+            node.getObject().toMap().forEach((k,v) -> {
+                validateObject(schema, schemaChildMap, k, (v instanceof JSONArray) ? ((JSONArray) v).toList() : v);
+            });
+        }
+    }
+
+    /*
+     * Validate objects recursively
+     */
+    private static void validateObject(Schema schema, Map<String, Map<String,List<String>>> schemaChildMap, String key, Object value) {
+        if (value instanceof Map<?,?>) {
+            validateSchemaObject(schema, schemaChildMap, key, (Map) value);
+
+            ((Map<?, ?>) value).entrySet().forEach(e -> {
+                validateObject(schema, schemaChildMap, (String) e.getKey(), e.getValue());
+            });
+        } else if (value instanceof List) {
+            ((List<?>) value).listIterator().forEachRemaining(v -> {
+                validateObject(schema, schemaChildMap, key, v);
+            });
+        }
+    }
+
+    /*
+     * Validate objects specific to a type. Currently only validating Datasets
+     */
+    private static void validateSchemaObject(Schema schema, Map<String, Map<String,List<String>>> schemaChildMap, String key, Map valueMap) {
+        if (schema.definesProperty("datasetVersion")) {
+            validateDatasetObject(schema, schemaChildMap, key, valueMap);
+        }
+    }
+
+    /*
+     * Specific validation for Dataset objects
+     */
+    private static void validateDatasetObject(Schema schema, Map<String, Map<String,List<String>>> schemaChildMap, String key, Map valueMap) {
+        if (valueMap != null && valueMap.containsKey("typeClass")) {
+            validateTypeClass(schema, schemaChildMap, key, valueMap, valueMap.get("value"), "dataset");
+        }
+    }
+
+    /*
+     * key: The name of the parent object
+     * valueMap: Map of all the metadata of the object
+     * value: The value field of the object
+     * messageType: Refers to the parent: if this is an object from a dataset, the messageType would be 'dataset'.
+     *              This needs to match the Bundle.properties keys used to map the error messages when an exception occurs.
+     *
+     *  Rules for typeClass:
+     *      The contents of value depend on the field attributes
+     *      if single/primitive, value is a String
+     *      if multiple, value is a JsonArray
+     *         multiple/primitive: each JsonArray element will contain String
+     *         multiple/compound: each JsonArray element will contain Set of FieldDTOs
+     */
+    private static void validateTypeClass(Schema schema, Map<String, Map<String,List<String>>> schemaChildMap, String key, Map valueMap, Object value, String messageType) {
+
+        String typeClass = valueMap.containsKey("typeClass") ? valueMap.get("typeClass").toString() : "";
+        String typeName = valueMap.containsKey("typeName") ? valueMap.get("typeName").toString() : "";
+        boolean multiple = Boolean.valueOf(String.valueOf(valueMap.getOrDefault("multiple", "false")));
+
+        // make sure there is a value since 'value' is required
+        if (value == null) {
+            throwValidationException("value.missing", List.of(key, typeName));
+        }
+
+        if (multiple && !(value instanceof List<?>)) {
+            throwValidationException("notlist.multiple", List.of(key, typeName, typeClass));
+        }
+        if (!multiple && value instanceof List<?>) {
+            throwValidationException("list.notmultiple", List.of(key, typeName));
+        }
+        if ("primitive".equals(typeClass) && !multiple && !(value instanceof String)) {
+            throwValidationException("type", List.of(key, typeName, typeClass));
+        }
+        if ("primitive".equals(typeClass) && multiple) {
+            ((List<?>) value).listIterator().forEachRemaining(primitive -> {
+                if (!(primitive instanceof String)) {
+                    throwValidationException("type", List.of(key, typeName, typeClass));
+                }
+            });
+        }
+        if ("compound".equals(typeClass)) {
+            if (multiple && value instanceof List<?>) {
+                ((List<?>) value).listIterator().forEachRemaining(item -> {
+                    if (!(item instanceof Map<?, ?>)) {
+                        throwValidationException("compound", List.of(key, typeName, typeClass));
+                    } else {
+                        ((Map) item).forEach((k,val) -> {
+                            if (!(val instanceof Map<?, ?>)) {
+                                throwValidationException("compound", List.of(key, typeName, typeClass));
+                            }
+                            // validate mismatch between compound object key and typeName in value
+                            String valTypeName = ((Map<?, ?>) val).containsKey("typeName") ? (String) ((Map<?, ?>) val).get("typeName") : "";
+                            if (!k.equals(valTypeName)) {
+                                throwValidationException("compound.mismatch", List.of((String) k, valTypeName));
+                            }
+                        });
+                        validateChildren(schema, schemaChildMap, key, ((Map) item).values(), typeName, messageType);
+                    }
+                });
+            }
+        }
+
+        if ("controlledVocabulary".equals(typeClass)) {
+            DatasetFieldType dsft = datasetFieldService.findByName(typeName);
+            if (value instanceof List<?>) {
+                ((List<?>) value).listIterator().forEachRemaining(cvv -> {
+                    if (datasetFieldService.findControlledVocabularyValueByDatasetFieldTypeAndStrValue(dsft, (String) cvv, true) == null) {
+                        throwValidationException("dataset", "cvv.missing", List.of(key, typeName, (String) cvv));
+                    }
+                });
+            } else {
+                if (datasetFieldService.findControlledVocabularyValueByDatasetFieldTypeAndStrValue(dsft, (String) value, true) == null) {
+                    throwValidationException("dataset", "cvv.missing", List.of(key, typeName, (String) value));
+                }
+            }
+        }
+    }
+
+    // If the value is another object, or a list of objects, that needs further validation, messageType refers to the parent.
+    // Example: for a dsDescriptionValue within a dataset, the messageType would be dataset.dsDescriptionValue.
+    // This needs to match the Bundle.properties keys used to map the error messages when an exception occurs.
+    private static void validateChildren(Schema schema, Map<String, Map<String,List<String>>> schemaChildMap, String key, Collection<Object> children, String typeName, String messageType) {
+        if (children == null || children.isEmpty()) {
+            return;
+        }
+        List<String> requiredFields = new ArrayList<>();
+        requiredFields.addAll((List)schemaChildMap.getOrDefault(typeName, Collections.EMPTY_MAP).getOrDefault("required", Collections.EMPTY_LIST));
+        List<String> allowedFields = (List)schemaChildMap.getOrDefault(typeName, Collections.EMPTY_MAP).getOrDefault("allowed", Collections.EMPTY_LIST);
+        children.forEach(child -> {
+            if (child instanceof Map<?, ?>) {
+                String childTypeName = ((Map<?, ?>) child).containsKey("typeName") ? (String)((Map<?, ?>) child).get("typeName") : "";
+                if (!allowedFields.isEmpty() && !allowedFields.contains(childTypeName)) {
+                    throwValidationException(messageType, "invalidType", List.of(typeName, childTypeName, allowedFields.stream().collect(Collectors.joining(", "))));
+                }
+                if (!requiredFields.isEmpty() && requiredFields.contains(childTypeName)) {
+                    requiredFields.remove(childTypeName);
+                }
+            }
+        });
+        if (!requiredFields.isEmpty()) {
+            throwValidationException(messageType, "required.missing", List.of(typeName, requiredFields.stream().collect(Collectors.joining(", ")), typeName));
+        }
+    }
+    private static void throwValidationException(String key, List<String> argList) {
+        throw new ValidationException(BundleUtil.getStringFromBundle("schema.validation.exception." + key, argList));
+    }
+    private static void throwValidationException(String type, String message, List<String> argList) {
+        if (type != null) {
+            throwValidationException(type + "." + message, argList);
+        } else {
+            throwValidationException(message, argList);
+        }
+    }
+}
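The comments in JSONDataValidation above describe per-typeClass rules and a schemaChildMap of required/allowed child fields consumed by validateChildren(). A hypothetical sketch of that map's shape, using the citation block's dsDescription compound field as an example (the field names are illustrative, and the real map is built elsewhere from the schema):

```java
import java.util.List;
import java.util.Map;

public class SchemaChildMapSketch {
    public static void main(String[] args) {
        // typeName -> { "required" -> child type names, "allowed" -> child type names }
        Map<String, Map<String, List<String>>> schemaChildMap = Map.of(
                "dsDescription", Map.of(
                        "required", List.of("dsDescriptionValue"),
                        "allowed", List.of("dsDescriptionValue", "dsDescriptionDate")));

        // JSONDataValidation.validate(schema, schemaChildMap, jsonInput) would then reject a
        // dsDescription entry that omits dsDescriptionValue or includes an unlisted child field.
        System.out.println(schemaChildMap);
    }
}
```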
diff --git a/src/main/java/propertyFiles/Bundle.properties b/src/main/java/propertyFiles/Bundle.properties
index b645276ceaf..4fc7ce88a7f 100644
--- a/src/main/java/propertyFiles/Bundle.properties
+++ b/src/main/java/propertyFiles/Bundle.properties
@@ -899,6 +899,8 @@ dataverse.link.dataset.none=No linkable dataverses available.
 dataverse.link.no.choice=You have one dataverse you can add linked dataverses and datasets in.
 dataverse.link.no.linkable=To be able to link a dataverse or dataset, you need to have your own dataverse. Create a dataverse to get started.
 dataverse.link.no.linkable.remaining=You have already linked all of your eligible dataverses.
+dataverse.unlink.dataset.choose=Enter the name of the dataverse you would like to unlink this dataset from.
+dataverse.unlink.dataset.none=No linked dataverses available.
 dataverse.savedsearch.link=Link Search
 dataverse.savedsearch.searchquery=Search
 dataverse.savedsearch.filterQueries=Facets
@@ -912,6 +914,7 @@ dataverse.linked.success= {0} has been successfully linked to {1}.
 dataverse.linked.success.wait= {0} has been successfully linked to {1}. Please wait for its contents to appear.
 dataverse.linked.internalerror={0} has been successfully linked to {1} but contents will not appear until an internal error has been fixed.
 dataverse.linked.error.alreadyLinked={0} has already been linked to {1}.
+dataverse.unlinked.success= {0} has been successfully unlinked from {1}.
 dataverse.page.pre=Previous
 dataverse.page.next=Next
 dataverse.byCategory=Dataverses by Category
@@ -947,7 +950,13 @@ dataverse.default=(Default)
 dataverse.metadatalanguage.setatdatasetcreation=Chosen at Dataset Creation
 dataverse.guestbookentry.atdownload=Guestbook Entry At Download
 dataverse.guestbookentry.atrequest=Guestbook Entry At Access Request
-dataverse.updateinputlevels.error.invalidfieldtypename=Invalid dataset field type name: {0}
+dataverse.inputlevels.error.invalidfieldtypename=Invalid dataset field type name: {0}
+dataverse.inputlevels.error.cannotberequiredifnotincluded=The input level for the dataset field type {0} cannot be required if it is not included
+dataverse.facets.error.fieldtypenotfound=Can't find dataset field type '{0}'
+dataverse.facets.error.fieldtypenotfacetable=Dataset field type '{0}' is not facetable
+dataverse.metadatablocks.error.invalidmetadatablockname=Invalid metadata block name: {0}
+dataverse.create.error.jsonparse=Error parsing Json: {0}
+dataverse.create.error.jsonparsetodataverse=Error parsing the POSTed json into a dataverse: {0}
 # rolesAndPermissionsFragment.xhtml
 
 # advanced.xhtml
@@ -1417,6 +1426,7 @@ dataset.accessBtn.too.big=The dataset is too large to download. Please select th
 dataset.accessBtn.original.too.big=The dataset is too large to download in the original format. Please select the files you need from the files table.
 dataset.accessBtn.archival.too.big=The dataset is too large to download in the archival format. Please select the files you need from the files table.
 dataset.linkBtn=Link Dataset
+dataset.unlinkBtn=Unlink Dataset
 dataset.contactBtn=Contact Owner
 dataset.shareBtn=Share
 
@@ -1524,6 +1534,8 @@ dataset.link.not.to.parent.dataverse=Can't link a dataset to its parent datavers
 dataset.link.not.published=Can't link a dataset that has not been published
 dataset.link.not.available=Can't link a dataset that has not been published or is not harvested
 dataset.link.not.already.linked=Can't link a dataset that has already been linked to this dataverse
+dataset.unlink.title=Unlink Dataset
+dataset.unlink.delete=Remove Linked Dataset
 dataset.email.datasetContactTitle=Contact Dataset Owner
 dataset.email.hiddenMessage= 
 dataset.email.messageSubject=Test Message Subject
@@ -1605,6 +1617,7 @@ dataset.message.createSuccess=This dataset has been created.
 dataset.message.createSuccess.failedToSaveFiles=Partial Success: The dataset has been created. But the file(s) could not be saved. Please try uploading the file(s) again.
 dataset.message.createSuccess.partialSuccessSavingFiles=Partial Success: The dataset has been created. But only {0} out of {1} files have been saved. Please try uploading the missing file(s) again.
 dataset.message.linkSuccess= {0} has been successfully linked to {1}.
+dataset.message.unlinkSuccess= {0} has been successfully unlinked from {1}.
 dataset.message.metadataSuccess=The metadata for this dataset have been updated.
 dataset.message.termsSuccess=The terms for this dataset have been updated.
 dataset.message.filesSuccess=One or more files have been updated.
@@ -2440,7 +2453,7 @@ permission.publishDataverse=Publish a dataverse
 permission.managePermissionsDataFile=Manage permissions for a file
 permission.managePermissionsDataset=Manage permissions for a dataset
 permission.managePermissionsDataverse=Manage permissions for a dataverse
-permission.editDataset=Edit a dataset's metadata
+permission.editDataset=Edit a dataset's metadata, license, and terms, and add/delete files
 permission.editDataverse=Edit a dataverse's metadata, facets, customization, and templates 
 permission.downloadFile=Download a file
 permission.viewUnpublishedDataset=View an unpublished dataset and its files
@@ -2540,6 +2553,7 @@ dataset.registered.msg=Your dataset is now registered.
 dataset.notlinked=DatasetNotLinked
 dataset.notlinked.msg=There was a problem linking this dataset to yours:
 dataset.linking.popop.already.linked.note=Note: This dataset is already linked to the following dataverse(s):  
+dataset.linking.popup.not.linked.note=Note: This dataset is not linked to any of your accessible dataverses
 datasetversion.archive.success=Archival copy of Version successfully submitted
 datasetversion.archive.failure=Error in submitting an archival copy 
 datasetversion.update.failure=Dataset Version Update failed. Changes are still in the DRAFT version.
@@ -2733,7 +2747,7 @@ dataverses.api.create.dataset.error.mustIncludeVersion=Please provide initial ve
 dataverses.api.create.dataset.error.superuserFiles=Only a superuser may add files via this api
 dataverses.api.create.dataset.error.mustIncludeAuthorName=Please provide author name in the dataset json
 dataverses.api.validate.json.succeeded=The Dataset JSON provided is valid for this Dataverse Collection.
-dataverses.api.validate.json.failed=The Dataset JSON provided failed validation with the following error: 
+dataverses.api.validate.json.failed=The Dataset JSON provided failed validation with the following error:
 dataverses.api.validate.json.exception=Validation failed with following exception:
 
 #Access.java
@@ -2794,7 +2808,7 @@ permission.PublishDataverse.desc=Publish a dataverse
 permission.ManageFilePermissions.desc=Manage permissions for a file
 permission.ManageDatasetPermissions.desc=Manage permissions for a dataset
 permission.ManageDataversePermissions.desc=Manage permissions for a dataverse
-permission.EditDataset.desc=Edit a dataset's metadata
+permission.EditDataset.desc=Edit a dataset's metadata, license, and terms, and add/delete files
 permission.EditDataverse.desc=Edit a dataverse's metadata, facets, customization, and templates 
 permission.DownloadFile.desc=Download a file
 permission.ViewUnpublishedDataset.desc=View an unpublished dataset and its files
@@ -3002,19 +3016,31 @@ pids.api.reservePid.success=PID reserved for {0}
 pids.api.deletePid.success=PID deleted for {0}
 pids.deletePid.failureExpected=Unable to delete PID {0}. Status code: {1}.
 pids.deletePid.failureOther=Problem deleting PID {0}: {1}
-pids.commands.reservePid.failure=Problem reserving PID for dataset id {0}: {1}.
 pids.datacite.errors.noResponseCode=Problem getting HTTP status code from {0}. Is it in DNS? Is doi.dataciterestapiurlstring configured properly?
 pids.datacite.errors.DoiOnly=Only doi: is supported.
 
-#PublishDatasetCommand
-publishDatasetCommand.pidNotReserved=Cannot publish dataset because its persistent identifier has not been reserved.
+#AbstractDatasetCommand
+abstractDatasetCommand.pidNotReserved=Unable to reserve a persistent identifier for the dataset: {0}.
+abstractDatasetCommand.filePidNotReserved=Unable to reserve a persistent identifier for one or more files in the dataset: {0}.
+abstractDatasetCommand.pidReservationRetryExceeded=This dataset may not be registered because its identifier is already in use by another dataset: gave up after {0} attempts. Current (last requested) identifier: {1}
 
 # APIs
 api.errors.invalidApiToken=Invalid API token.
 api.ldninbox.citation.alert={0},<br><br> The {1} has just been notified that the {2}, <a href=''{3}''>{3}</a>, cites "<a href=''{4}/dataset.xhtml?persistentId={5}''>{6}</a>" in this repository.
 api.ldninbox.citation.subject={0}: A Dataset Citation has been reported!
 
+#Schema Validation
+schema.validation.exception.value.missing=Invalid data for key:{0} typeName:{1}. ''value'' missing.
+schema.validation.exception.list.notmultiple=Invalid data for key:{0} typeName:{1}. Found value as list but ''multiple'' is set to false.
+schema.validation.exception.notlist.multiple=Invalid data for key:{0} typeName:{1}. Fields with ''multiple'' set to true must be a list.
+schema.validation.exception.compound=Compound data type must be accompanied by a value that is either an object (multiple=false) or a list of objects (multiple=true)
+schema.validation.exception.compound.mismatch=Compound value {0} must match typeName of the object. Found {1}
+schema.validation.exception.dataset.cvv.missing=Controlled vocabulary for key:{0} typeName:{1} value:''{2}'' is not valid.
+schema.validation.exception.dataset.invalidType=Invalid data for key:{0} typeName:{1}. Only {2} allowed.
+schema.validation.exception.dataset.required.missing=Invalid data for key:{0}. {1} is(are) required if field type is {2}.
+
 #Info.java
 openapi.exception.invalid.format=Invalid format {0}, currently supported formats are YAML and JSON.
 openapi.exception=Supported format definition not found.
 openapi.exception.unaligned=Unaligned parameters on Headers [{0}] and Request [{1}]
+
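
One detail worth noting about the new bundle entries: values that take arguments are rendered through MessageFormat, which is why a literal single quote has to be doubled (''multiple''); a lone quote silently swallows the text that follows it. A small plain-JDK illustration:

    import java.text.MessageFormat;

    public class MessageFormatQuotingDemo {
        public static void main(String[] args) {
            // Doubled quotes survive formatting and the placeholders are substituted.
            String escaped = "Invalid data for key:{0} typeName:{1}. Found value as list but ''multiple'' is set to false.";
            System.out.println(MessageFormat.format(escaped, "fields", "title"));
            // -> Invalid data for key:fields typeName:title. Found value as list but 'multiple' is set to false.

            // A single unescaped quote starts a quoted literal, so the apostrophe
            // disappears and quoting around a placeholder is easy to get wrong.
            String unescaped = "Don't allow '{0}' here";
            System.out.println(MessageFormat.format(unescaped, "title"));
            // -> Dont allow title here
        }
    }
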
diff --git a/src/main/java/propertyFiles/MimeTypeDetectionByFileExtension.properties b/src/main/java/propertyFiles/MimeTypeDetectionByFileExtension.properties
index 97b2eed111c..630539d912e 100644
--- a/src/main/java/propertyFiles/MimeTypeDetectionByFileExtension.properties
+++ b/src/main/java/propertyFiles/MimeTypeDetectionByFileExtension.properties
@@ -38,3 +38,4 @@ nf=text/x-nextflow
 Rmd=text/x-r-notebook
 rb=text/x-ruby-script
 dag=text/x-dagman
+glb=model/gltf-binary
diff --git a/src/main/java/propertyFiles/MimeTypeDisplay.properties b/src/main/java/propertyFiles/MimeTypeDisplay.properties
index 8e5a251abbf..549b2b13442 100644
--- a/src/main/java/propertyFiles/MimeTypeDisplay.properties
+++ b/src/main/java/propertyFiles/MimeTypeDisplay.properties
@@ -219,6 +219,8 @@ video/quicktime=Quicktime Video
 video/webm=WebM Video
 # Network Data
 text/xml-graphml=GraphML Network Data
+# 3D Data
+model/gltf-binary=3D Model
 # Other
 application/octet-stream=Unknown
 application/x-docker-file=Docker Image File
diff --git a/src/main/java/propertyFiles/MimeTypeFacets.properties b/src/main/java/propertyFiles/MimeTypeFacets.properties
index 0dad8daff4c..0b0fde89cbd 100644
--- a/src/main/java/propertyFiles/MimeTypeFacets.properties
+++ b/src/main/java/propertyFiles/MimeTypeFacets.properties
@@ -223,6 +223,8 @@ video/webm=Video
 # (anything else that looks like image/* will also be indexed as facet type "Video")
 # Network Data
 text/xml-graphml=Network Data
+# 3D Data
+model/gltf-binary=3D Data
 # Other
 application/octet-stream=Unknown
 application/ld+json;\u0020profile\u003d\u0022http\u003a//www.w3.org/ns/json-ld#flattened\u0020http\u003a//www.w3.org/ns/json-ld#compacted\u0020https\u003a//w3id.org/ro/crate\u0022=Metadata
diff --git a/src/main/java/propertyFiles/codeMeta20.properties b/src/main/java/propertyFiles/codeMeta20.properties
index c0e7eac6d4a..4f3eb087aa4 100644
--- a/src/main/java/propertyFiles/codeMeta20.properties
+++ b/src/main/java/propertyFiles/codeMeta20.properties
@@ -1,5 +1,6 @@
 metadatablock.name=codeMeta20
-metadatablock.displayName=Software Metadata (CodeMeta 2.0)
+metadatablock.displayName=Software Metadata (CodeMeta v2.0)
+metadatablock.displayFacet=Software
 datasetfieldtype.codeVersion.title=Software Version
 datasetfieldtype.codeVersion.description=Version of the software instance, usually following some convention like SemVer etc.
 datasetfieldtype.codeVersion.watermark=e.g. 0.2.1 or 1.3 or 2021.1 etc
diff --git a/src/main/resources/db/migration/V6.3.0.1.sql b/src/main/resources/db/migration/V6.3.0.1.sql
new file mode 100644
index 00000000000..fd9cd823868
--- /dev/null
+++ b/src/main/resources/db/migration/V6.3.0.1.sql
@@ -0,0 +1,10 @@
+UPDATE termsofuseandaccess SET license_id = (SELECT license.id FROM license WHERE license.name = 'CC0 1.0'), termsofuse = NULL
+WHERE termsofuse = 'This dataset is made available under a Creative Commons CC0 license with the following additional/modified terms and conditions: CC0 Waiver'
+  AND license_id IS NULL
+  AND confidentialitydeclaration IS NULL
+  AND specialpermissions IS NULL
+  AND restrictions IS NULL
+  AND citationrequirements IS NULL
+  AND depositorrequirements IS NULL
+  AND conditions IS NULL
+  AND disclaimer IS NULL;
diff --git a/src/main/webapp/WEB-INF/web.xml b/src/main/webapp/WEB-INF/web.xml
index 427615f2f0b..732c634205f 100644
--- a/src/main/webapp/WEB-INF/web.xml
+++ b/src/main/webapp/WEB-INF/web.xml
@@ -182,6 +182,11 @@
         <extension>webmanifest</extension>
         <mime-type>application/manifest+json</mime-type>
     </mime-mapping>
+    <mime-mapping>
+        <extension>xhtml</extension>
+        <mime-type>text/html</mime-type>
+    </mime-mapping>
+    
     <!-- BEGIN Data Deposit API (SWORD v2) -->
     <!-- See also SwordConfigurationImpl for how deprecation warnings are configured -->
     <context-param>
diff --git a/src/main/webapp/dataset-license-terms.xhtml b/src/main/webapp/dataset-license-terms.xhtml
index 42a4c3ec5f5..255e63fbfc2 100644
--- a/src/main/webapp/dataset-license-terms.xhtml
+++ b/src/main/webapp/dataset-license-terms.xhtml
@@ -44,7 +44,7 @@
                                        <h:outputText value="#{bundle['file.dataFilesTab.terms.list.license.view.description']}" escape="false"/>
                                    </p>
                                </ui:fragment>
-                               <ui:fragment rendered="#{empty editMode and empty termsOfUseAndAccess.license}">
+                               <ui:fragment rendered="#{datasetPage == true and empty editMode and empty termsOfUseAndAccess.license}">
                                    <p>
                                        <h:outputText value="#{DatasetUtil:getLicenseName(DatasetPage.workingVersion).concat(' ')}" escape="false"/>
                                        <h:outputText rendered="#{!datasetVersionServiceBean.isVersionDefaultCustomTerms(DatasetPage.workingVersion)}" value="#{bundle['file.dataFilesTab.terms.list.license.customterms.txt']}" escape="false"/>
diff --git a/src/main/webapp/dataset.xhtml b/src/main/webapp/dataset.xhtml
index 936d354e9d7..4cece74b67a 100644
--- a/src/main/webapp/dataset.xhtml
+++ b/src/main/webapp/dataset.xhtml
@@ -522,6 +522,16 @@
                                                 </p:commandLink>
                                             </div>
                                             <!-- END: LINK -->
+                                            <!-- UNLINK -->
+                                            <div class="btn-group btn-group-justified" jsf:rendered="#{dataverseSession.user.authenticated and !DatasetPage.workingVersion.deaccessioned and DatasetPage.dataset.released and !empty DatasetPage.dataset.datasetLinkingDataverses}">
+                                                <p:commandLink styleClass="btn btn-default btn-access btn-xs btn-block btn-link-dataset"
+                                                               action="#{DatasetPage.setShowUnLinkingPopup(true)}"
+                                                               oncomplete="PF('unlinkDatasetForm').show();"
+                                                               update="@form">
+                                                    #{bundle['dataset.unlinkBtn']}
+                                                </p:commandLink>
+                                            </div>
+                                            <!-- END: UNLINK -->
 
                                             <!-- Contact/Share Button Group -->
                                             <div class="btn-group btn-group-justified">
@@ -1697,6 +1707,72 @@
                         </div>
                     </p:dialog>
                     <p:remoteCommand name="linkDatasetCommand" oncomplete="PF('linkDatasetForm').hide();" update=":messagePanel  @([id$=Messages])" actionListener="#{DatasetPage.saveLinkingDataverses}"/>
+                    <p:dialog id="unlinkDatasetForm" styleClass="largePopUp" header="#{bundle['dataset.unlink.title']}" widgetVar="unlinkDatasetForm" modal="true" rendered="#{DatasetPage.showUnLinkingPopup}">
+                        <p:focus for="dataverseUnLinkName"/>
+                        <div class="form-horizontal">
+                            <p class="help-block">
+                                <h:outputFormat value="#{bundle['dataverse.unlink.dataset.choose']}" escape="false">
+                                    <o:param>
+                                        <p:commandLink value="#{settingsWrapper.supportTeamName}" oncomplete="PF('unlinkDatasetForm').hide();PF('contactForm').show()" update=":contactDialog" actionListener="#{sendFeedbackDialog.initUserInput}">
+                                            <f:setPropertyActionListener target="#{sendFeedbackDialog.messageSubject}" value=""/>
+                                            <f:setPropertyActionListener target="#{sendFeedbackDialog.recipient}" value="#{null}"/>
+                                            <f:setPropertyActionListener target="#{sendFeedbackDialog.userMessage}" value=""/>
+                                            <f:setPropertyActionListener target="#{sendFeedbackDialog.userEmail}" value=""/>
+                                        </p:commandLink>
+                                    </o:param>
+                                </h:outputFormat>
+                            </p>
+                            <div class="form-group">
+                                <label class="col-xs-3 control-label">
+                                    <h:outputText value="#{bundle['dataverse.link.yourDataverses']}"/>
+                                </label>
+                                <div class="col-xs-8">
+                                    <p:fragment id="unlinkNameContent">
+                                        <p:autoComplete id="dataverseUnLinkName"
+                                                        placeholder="#{bundle['dataverse.link.yourDataverses.inputPlaceholder']}"
+                                                        emptyMessage="#{bundle['dataverse.unlink.dataset.none']}"
+                                                        scrollHeight="180" forceSelection="true"
+                                                        minQueryLength="1" queryDelay="1000"
+                                                        value="#{DatasetPage.selectedDataverseForLinking}"
+                                                        multiple="false"
+                                                        completeMethod="#{DatasetPage.completeUnLinkingDataverse}"
+                                                        required="#{param['DO_DS_LINK_VALIDATION']}" requiredMessage="#{bundle['dataverse.link.select']}"
+                                                        styleClass="DropdownPopup" panelStyleClass="DropdownPopupPanel"
+                                                        var="dataverseLk" itemLabel="#{dataverseLk.displayName}" itemValue="#{dataverseLk}" converter="dataverseConverter">
+                                            <p:column>
+                                                <h:outputText value="#{dataverseLk.displayName}"/>
+                                            </p:column>
+                                            <p:column>
+                                                <h:outputText value="#{dataverseLk.alias}"/>
+                                            </p:column>
+                                            <p:ajax process="@this" event="itemSelect" />
+                                            <p:ajax process="@this" event="itemUnselect" />
+                                        </p:autoComplete>
+                                        <p:message for="dataverseUnLinkName"/>
+                                    </p:fragment>
+                                </div>
+                            </div>
+                        </div>
+                        <div>
+                            <p:fragment rendered="#{empty DatasetPage.alreadyLinkedDataverses}">
+                                <h:outputLabel value="#{bundle['dataset.linking.popup.not.linked.note']}"/>&#160;
+                                <h:outputText value=""/>
+                            </p:fragment>
+                        </div>
+                        <div class="button-block">
+                            <p:commandButton id="deleteLinkButton" styleClass="btn btn-default"
+                                             update="unlinkNameContent @([id$=Messages])"
+                                             oncomplete="if (args &amp;&amp; !args.validationFailed) unlinkDatasetCommand();"
+                                             value="#{bundle['dataset.unlink.delete']}">
+                                <f:param name="DO_DS_LINK_VALIDATION" value="true"/>
+                            </p:commandButton>
+                            <button class="btn btn-link" onclick="PF('unlinkDatasetForm').hide();
+                                    PF('blockDatasetForm').hide();" type="button">
+                                #{bundle.cancel}
+                            </button>
+                        </div>
+                    </p:dialog>
+                    <p:remoteCommand name="unlinkDatasetCommand" oncomplete="PF('unlinkDatasetForm').hide();" update=":messagePanel  @([id$=Messages])" actionListener="#{DatasetPage.deleteLinkingDataverses}"/>
                     <p:dialog id="computeBatchListPopup" header="#{bundle['dataset.compute.computeBatchListHeader']}" widgetVar="computeBatchListPopup" modal="true">
                         <div class="text-right">
                             <!-- Clear link -->
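
For orientation, the unlink dialog above relies on three DatasetPage members: setShowUnLinkingPopup (toggled by the button), completeUnLinkingDataverse (the autocomplete source, expected to offer only dataverses the dataset is already linked to), and deleteLinkingDataverses (invoked by the unlinkDatasetCommand remote command). A stripped-down, hypothetical stand-in is sketched below; everything except those three method names is an assumption, not the actual DatasetPage code:

    import java.util.ArrayList;
    import java.util.List;
    import java.util.Locale;
    import java.util.stream.Collectors;

    class UnlinkDialogBackingSketch {

        // Minimal stand-in for a linked dataverse; the real page works with Dataverse entities.
        static class DataverseStub {
            private final String displayName;
            DataverseStub(String displayName) { this.displayName = displayName; }
            String getDisplayName() { return displayName; }
        }

        private boolean showUnLinkingPopup;                 // rendered="#{DatasetPage.showUnLinkingPopup}"
        private DataverseStub selectedDataverseForLinking;  // value bound to the autocomplete
        private final List<DataverseStub> linkedDataverses = new ArrayList<>();

        public boolean isShowUnLinkingPopup() { return showUnLinkingPopup; }
        public void setShowUnLinkingPopup(boolean show) { this.showUnLinkingPopup = show; }

        // Autocomplete source: only dataverses this dataset is already linked to, filtered by the query.
        public List<DataverseStub> completeUnLinkingDataverse(String query) {
            String q = query.toLowerCase(Locale.ROOT);
            return linkedDataverses.stream()
                    .filter(dv -> dv.getDisplayName().toLowerCase(Locale.ROOT).contains(q))
                    .collect(Collectors.toList());
        }

        // Invoked by the unlinkDatasetCommand remote command; the real page would submit a
        // delete-link command and queue the dataset.message.unlinkSuccess bundle message.
        public void deleteLinkingDataverses() {
            linkedDataverses.remove(selectedDataverseForLinking);
            showUnLinkingPopup = false;
        }
    }
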
diff --git a/src/main/webapp/guestbook-terms-popup-fragment.xhtml b/src/main/webapp/guestbook-terms-popup-fragment.xhtml
index d53c4bf4709..6eec4f100b1 100644
--- a/src/main/webapp/guestbook-terms-popup-fragment.xhtml
+++ b/src/main/webapp/guestbook-terms-popup-fragment.xhtml
@@ -48,7 +48,7 @@
                     </a>
                     <ui:fragment
                         rendered="#{empty workingVersion.termsOfUseAndAccess.license}">
-                        <h:outputText rendered="#{!datasetVersionServiceBean.isVersionDefaultCustomTerms(DatasetPage.workingVersion)}"
+                        <h:outputText rendered="#{!datasetVersionServiceBean.isVersionDefaultCustomTerms(workingVersion)}"
                             value="#{bundle['file.dataFilesTab.terms.list.license.customterms.txt']}"
                             escape="false" />
                     </ui:fragment>
diff --git a/src/main/webapp/template.xhtml b/src/main/webapp/template.xhtml
index 280d5ed05b9..12dca612f1b 100644
--- a/src/main/webapp/template.xhtml
+++ b/src/main/webapp/template.xhtml
@@ -114,7 +114,7 @@
                     </ui:fragment>
                     <!-- Create/Save Dataset Button Panel -->
                     <div class="button-block" jsf:rendered="#{!empty TemplatePage.editMode}">
-                        <p:commandButton styleClass="btn btn-default" action="#{TemplatePage.save('Terms')}" update=":@form,messagePanel"
+                        <p:commandButton styleClass="btn btn-default" action="#{TemplatePage.save('Terms')}" update="@form,messagePanel"
                                          value="#{bundle['dataset.create.add.terms']}"
                                          rendered="#{TemplatePage.editMode == 'CREATE'}">  
                              <f:ajax onerror="window.scrollTo(0, 0)"/>
diff --git a/src/test/java/edu/harvard/iq/dataverse/DatasetFieldTest.java b/src/test/java/edu/harvard/iq/dataverse/DatasetFieldTest.java
new file mode 100644
index 00000000000..97999af3244
--- /dev/null
+++ b/src/test/java/edu/harvard/iq/dataverse/DatasetFieldTest.java
@@ -0,0 +1,64 @@
+package edu.harvard.iq.dataverse;
+
+import edu.harvard.iq.dataverse.DatasetFieldType.FieldType;
+import edu.harvard.iq.dataverse.mocks.MocksFactory;
+import org.junit.jupiter.api.Test;
+
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertNotEquals;
+import static org.junit.jupiter.api.Assertions.assertTrue;
+import static org.junit.jupiter.api.Assertions.assertNull;
+
+
+class DatasetFieldTest {
+    @Test
+    void testCreateNewEmptyDatasetField_withEmptyTemplate() {
+        Template template = new Template();
+        
+        DatasetField field = DatasetField.createNewEmptyDatasetField(new DatasetFieldType("subject", FieldType.TEXT, false), template);
+        assertTrue(field.getTemplate() == template);
+        assertTrue(template.getDatasetFields().isEmpty());
+    }
+
+    @Test
+    void testNotEqualDatasetFields() {
+        DatasetFieldType type1 = new DatasetFieldType("subject", FieldType.TEXT, false);
+        Template template1 = new Template();
+        DatasetField field1 = DatasetField.createNewEmptyDatasetField(type1, template1);
+        field1.setId(MocksFactory.nextId());
+        DatasetFieldType type2 = new DatasetFieldType("subject", FieldType.TEXT, false);
+        Template template2 = new Template();
+        DatasetField field2 = DatasetField.createNewEmptyDatasetField(type2, template2);
+        field2.setId(MocksFactory.nextId());
+
+        assertNotEquals(field1, field2);
+        assertNotEquals(field1, template2);
+    }
+
+    @Test
+    void testEqualDatasetFields() {
+        DatasetField field1 = DatasetField.createNewEmptyDatasetField(new DatasetFieldType("subject", FieldType.TEXT, false), new Template());
+        field1.setId(100L);
+        DatasetField field2 = DatasetField.createNewEmptyDatasetField(new DatasetFieldType("subject", FieldType.TEXT, false), new Template());
+
+        // Fields are not equal before both have IDs set
+        assertNotEquals(field1, field2);
+        
+        field2.setId(100L);
+
+        assertEquals(field1, field2);
+    }
+
+    @Test
+    void testCopyDatasetFields() {
+        DatasetField field1 = DatasetField.createNewEmptyDatasetField(new DatasetFieldType("subject", FieldType.TEXT, false), new Template());
+        field1.setId(100L);
+        DatasetField field2 = field1.copy(field1.getTemplate());
+
+        assertNull(field2.getId());
+        // A copy of a field should not be equal
+        assertNotEquals(field1, field2);
+
+        assertEquals(field2.getDatasetFieldType(), field1.getDatasetFieldType());
+    }
+}
\ No newline at end of file
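
The new DatasetFieldTest above pins down identity semantics: two fields compare equal only when both carry the same non-null id, and a copy starts out with a null id and is therefore not equal to its source. A generic sketch of that id-based equals/hashCode pattern (not the actual DatasetField implementation):

    import java.util.Objects;

    class IdBasedEntitySketch {
        private Long id;

        Long getId() { return id; }
        void setId(Long id) { this.id = id; }

        @Override
        public boolean equals(Object other) {
            if (this == other) {
                return true;
            }
            if (!(other instanceof IdBasedEntitySketch)) {
                return false;
            }
            IdBasedEntitySketch that = (IdBasedEntitySketch) other;
            // Unsaved instances (null id) are never equal to a different object.
            return id != null && id.equals(that.id);
        }

        @Override
        public int hashCode() {
            return Objects.hashCode(id);
        }
    }
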
diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DatasetFieldsIT.java b/src/test/java/edu/harvard/iq/dataverse/api/DatasetFieldsIT.java
new file mode 100644
index 00000000000..ae90ddf0b4c
--- /dev/null
+++ b/src/test/java/edu/harvard/iq/dataverse/api/DatasetFieldsIT.java
@@ -0,0 +1,29 @@
+package edu.harvard.iq.dataverse.api;
+
+import io.restassured.RestAssured;
+import io.restassured.response.Response;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.Test;
+
+import static jakarta.ws.rs.core.Response.Status.OK;
+import static org.hamcrest.CoreMatchers.equalTo;
+
+public class DatasetFieldsIT {
+
+    @BeforeAll
+    public static void setUpClass() {
+        RestAssured.baseURI = UtilIT.getRestAssuredBaseUri();
+    }
+
+    @Test
+    void testListAllFacetableDatasetFields() {
+        Response listAllFacetableDatasetFieldsResponse = UtilIT.listAllFacetableDatasetFields();
+        listAllFacetableDatasetFieldsResponse.then().assertThat().statusCode(OK.getStatusCode());
+        int expectedNumberOfFacetableDatasetFields = 59;
+        listAllFacetableDatasetFieldsResponse.then().assertThat()
+                .statusCode(OK.getStatusCode())
+                .body("data[0].name", equalTo("authorName"))
+                .body("data[0].displayName", equalTo("Author Name"))
+                .body("data.size()", equalTo(expectedNumberOfFacetableDatasetFields));
+    }
+}
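
UtilIT.listAllFacetableDatasetFields() itself is not part of this excerpt; a minimal REST Assured sketch of what such a helper could look like follows. The /api/datasetfields/facetables path is an assumption and should be checked against the UtilIT changes in this pull request.

    import io.restassured.RestAssured;
    import io.restassured.response.Response;

    public class FacetableFieldsClientSketch {

        // Hypothetical equivalent of UtilIT.listAllFacetableDatasetFields();
        // the endpoint path is an assumption.
        static Response listAllFacetableDatasetFields() {
            return RestAssured.given().get("/api/datasetfields/facetables");
        }

        public static void main(String[] args) {
            RestAssured.baseURI = "http://localhost:8080"; // instance under test
            Response response = listAllFacetableDatasetFields();
            System.out.println(response.getStatusCode());
            response.prettyPrint();
        }
    }
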
diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java
index cb9481d3491..3ff580268a9 100644
--- a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java
+++ b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java
@@ -182,6 +182,90 @@ public void testCollectionSchema(){
         
     }
 
+    @Test
+    public void testDatasetSchemaValidation() {
+
+        Response createUser = UtilIT.createRandomUser();
+        createUser.prettyPrint();
+        String username = UtilIT.getUsernameFromResponse(createUser);
+        String apiToken = UtilIT.getApiTokenFromResponse(createUser);
+
+        Response createDataverseResponse = UtilIT.createRandomDataverse(apiToken);
+        createDataverseResponse.prettyPrint();
+        String dataverseAlias = UtilIT.getAliasFromResponse(createDataverseResponse);
+
+        Response getCollectionSchemaResponse =  UtilIT.getCollectionSchema(dataverseAlias, apiToken);
+        getCollectionSchemaResponse.prettyPrint();
+        getCollectionSchemaResponse.then().assertThat()
+                .statusCode(200);
+
+        JsonObject expectedSchema = null;
+        try {
+            expectedSchema = JsonUtil.getJsonObjectFromFile("doc/sphinx-guides/source/_static/api/dataset-schema.json");
+        } catch (IOException ex) {
+            // If the schema file cannot be read, expectedSchema stays null and the assertEquals below fails.
+        }
+
+        assertEquals(JsonUtil.prettyPrint(expectedSchema), JsonUtil.prettyPrint(getCollectionSchemaResponse.body().asString()));
+
+        // add a language that is not in the Controlled vocabulary
+        testDatasetSchemaValidationHelper(dataverseAlias, apiToken,
+                "\"aar\"",
+                "\"aar\",\"badlang\"",
+                BundleUtil.getStringFromBundle("schema.validation.exception.dataset.cvv.missing", List.of("fields", "language", "badlang"))
+        );
+
+        // change multiple to true on value that is a not a List
+        testDatasetSchemaValidationHelper(dataverseAlias, apiToken,
+                "multiple\": false,\n" +
+                        "            \"typeName\": \"title",
+                "multiple\": true,\n" +
+                        "            \"typeName\": \"title",
+                BundleUtil.getStringFromBundle("schema.validation.exception.notlist.multiple", List.of("fields", "title"))
+        );
+
+        // change multiple to false on value that is a List
+        testDatasetSchemaValidationHelper(dataverseAlias, apiToken,
+                "typeName\": \"language\",\n" +
+                        "            \"multiple\": true",
+                "typeName\": \"language\",\n" +
+                        "            \"multiple\": false",
+                BundleUtil.getStringFromBundle("schema.validation.exception.list.notmultiple", List.of("fields", "language"))
+        );
+
+        // add a mismatched typeName
+        testDatasetSchemaValidationHelper(dataverseAlias, apiToken,
+                "\"typeName\": \"datasetContactName\",",
+                "\"typeName\": \"datasetContactNme\",",
+                BundleUtil.getStringFromBundle("schema.validation.exception.compound.mismatch", List.of("datasetContactName", "datasetContactNme"))
+        );
+
+        // add a typeName which is not allowed
+        testDatasetSchemaValidationHelper(dataverseAlias, apiToken,
+                "\"datasetContactEmail\": {\n" +
+                        "                  \"typeClass\": \"primitive\",\n" +
+                        "                  \"multiple\": false,\n" +
+                        "                  \"typeName\": \"datasetContactEmail\",",
+                "\"datasetContactNotAllowed\": {\n" +
+                        "                  \"typeClass\": \"primitive\",\n" +
+                        "                  \"multiple\": false,\n" +
+                        "                  \"typeName\": \"datasetContactNotAllowed\",",
+                BundleUtil.getStringFromBundle("schema.validation.exception.dataset.invalidType", List.of("datasetContact", "datasetContactNotAllowed", "datasetContactName, datasetContactAffiliation, datasetContactEmail"))
+        );
+
+        Response deleteDataverseResponse = UtilIT.deleteDataverse(dataverseAlias, apiToken);
+        deleteDataverseResponse.prettyPrint();
+        assertEquals(200, deleteDataverseResponse.getStatusCode());
+    }
+    private void testDatasetSchemaValidationHelper(String dataverseAlias, String apiToken, String origString, String replacementString, String expectedError) {
+        String json = UtilIT.getDatasetJson("scripts/search/tests/data/dataset-finch3.json");
+        json = json.replace(origString, replacementString);
+        Response validateDatasetJsonResponse = UtilIT.validateDatasetJson(dataverseAlias, json, apiToken);
+        validateDatasetJsonResponse.prettyPrint();
+        validateDatasetJsonResponse.then().assertThat()
+                .statusCode(200)
+                .body(containsString(expectedError));
+    }
+
     @Test
     public void testCreateDataset() {
 
@@ -2411,6 +2495,7 @@ public void testCreateDeleteDatasetLink() {
         Response createDatasetResponse = UtilIT.createRandomDatasetViaNativeApi(dataverseAlias, apiToken);
         createDatasetResponse.prettyPrint();
         Integer datasetId = UtilIT.getDatasetIdFromResponse(createDatasetResponse);
+        String datasetPersistentId = UtilIT.getDatasetPersistentIdFromResponse(createDatasetResponse);
         
         // This should fail, because we are attempting to link the dataset 
         // to its own dataverse:
@@ -2445,11 +2530,44 @@ public void testCreateDeleteDatasetLink() {
         createLinkingDatasetResponse.then().assertThat()
                 .body("data.message", equalTo("Dataset " + datasetId +" linked successfully to " + dataverseAlias))
                 .statusCode(200);
-        
-        // And now test deleting it:
-        Response deleteLinkingDatasetResponse = UtilIT.deleteDatasetLink(datasetId.longValue(), dataverseAlias, apiToken);
+
+        // Create a new user that doesn't have permission to delete the link
+        Response createUser2 = UtilIT.createRandomUser();
+        createUser2.prettyPrint();
+        String username2 = UtilIT.getUsernameFromResponse(createUser2);
+        String apiToken2 = UtilIT.getApiTokenFromResponse(createUser2);
+        // Try to delete the link without PublishDataset permissions
+        Response deleteLinkingDatasetResponse = UtilIT.deleteDatasetLink(datasetId.longValue(), dataverseAlias, apiToken2);
         deleteLinkingDatasetResponse.prettyPrint();
-        
+        deleteLinkingDatasetResponse.then().assertThat()
+                .body("message", equalTo("User @" + username2 + " is not permitted to perform requested action."))
+                .statusCode(UNAUTHORIZED.getStatusCode());
+
+        // Add the Curator role to this user to show that they can delete the link later. (Timing issues if you try to delete right after giving permission)
+        Response givePermissionResponse = UtilIT.grantRoleOnDataset(datasetPersistentId, "curator", "@" + username2, apiToken);
+        givePermissionResponse.prettyPrint();
+        givePermissionResponse.then().assertThat()
+                .statusCode(200);
+
+        // And now test deleting it as superuser:
+        deleteLinkingDatasetResponse = UtilIT.deleteDatasetLink(datasetId.longValue(), dataverseAlias, apiToken);
+        deleteLinkingDatasetResponse.prettyPrint();
+
+        deleteLinkingDatasetResponse.then().assertThat()
+                .body("data.message", equalTo("Link from Dataset " + datasetId + " to linked Dataverse " + dataverseAlias + " deleted"))
+                .statusCode(200);
+
+        // And re-link the dataset to this new dataverse:
+        createLinkingDatasetResponse = UtilIT.createDatasetLink(datasetId.longValue(), dataverseAlias, apiToken);
+        createLinkingDatasetResponse.prettyPrint();
+        createLinkingDatasetResponse.then().assertThat()
+                .body("data.message", equalTo("Dataset " + datasetId +" linked successfully to " + dataverseAlias))
+                .statusCode(200);
+
+        // And now test deleting it as user2 with new role as curator (Publish permissions):
+        deleteLinkingDatasetResponse = UtilIT.deleteDatasetLink(datasetId.longValue(), dataverseAlias, apiToken2);
+        deleteLinkingDatasetResponse.prettyPrint();
+
         deleteLinkingDatasetResponse.then().assertThat()
                 .body("data.message", equalTo("Link from Dataset " + datasetId + " to linked Dataverse " + dataverseAlias + " deleted"))
                 .statusCode(200);
@@ -3542,17 +3660,47 @@ public void testCuratePublishedDatasetVersionCommand() throws IOException {
 
         UtilIT.publishDatasetViaNativeApi(datasetId, "updatecurrent", apiToken).then().assertThat().statusCode(FORBIDDEN.getStatusCode());
         
-        Response makeSuperUser = UtilIT.makeSuperUser(username);
+        Response makeSuperUser = UtilIT.setSuperuserStatus(username, true);
                 
         //should work after making super user
         
         UtilIT.publishDatasetViaNativeApi(datasetId, "updatecurrent", apiToken).then().assertThat().statusCode(OK.getStatusCode());
         
+        //Check that the dataset contains the updated metadata (which includes the name Spruce)
         Response getDatasetJsonAfterUpdate = UtilIT.nativeGet(datasetId, apiToken);
-        getDatasetJsonAfterUpdate.prettyPrint();
+        assertTrue(getDatasetJsonAfterUpdate.prettyPrint().contains("Spruce"));
         getDatasetJsonAfterUpdate.then().assertThat()
                 .statusCode(OK.getStatusCode());
         
+        //Check that the draft version is gone
+        Response getDraft1 = UtilIT.getDatasetVersion(datasetPid, DS_VERSION_DRAFT, apiToken);
+        getDraft1.then().assertThat()
+                .statusCode(NOT_FOUND.getStatusCode());
+
+        
+        //Also test a terms change
+        String jsonLDTerms = "{\"https://dataverse.org/schema/core#fileTermsOfAccess\":{\"https://dataverse.org/schema/core#dataAccessPlace\":\"Somewhere\"}}";
+        Response updateTerms = UtilIT.updateDatasetJsonLDMetadata(datasetId, apiToken, jsonLDTerms, true);
+        updateTerms.then().assertThat()
+                .statusCode(OK.getStatusCode());
+        
+        //Run Update-Current Version again
+        
+        UtilIT.publishDatasetViaNativeApi(datasetId, "updatecurrent", apiToken).then().assertThat().statusCode(OK.getStatusCode());
+
+        
+        //Verify the new term is there
+        Response jsonLDResponse = UtilIT.getDatasetJsonLDMetadata(datasetId, apiToken);
+        assertTrue(jsonLDResponse.prettyPrint().contains("Somewhere"));
+        jsonLDResponse.then().assertThat()
+                .statusCode(OK.getStatusCode());
+        
+        //And that the draft is gone
+        Response getDraft2 = UtilIT.getDatasetVersion(datasetPid, DS_VERSION_DRAFT, apiToken);
+        getDraft2.then().assertThat()
+                .statusCode(NOT_FOUND.getStatusCode());
+       
+        
     }
     
     /**
diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java b/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java
index 79cc46cfa79..d682e4ade98 100644
--- a/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java
+++ b/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java
@@ -25,7 +25,6 @@
 
 import static jakarta.ws.rs.core.Response.Status.*;
 import static org.hamcrest.CoreMatchers.*;
-import static org.hamcrest.CoreMatchers.containsString;
 import static org.hamcrest.MatcherAssert.assertThat;
 import static org.hamcrest.Matchers.hasItemInArray;
 import static org.junit.jupiter.api.Assertions.*;
@@ -702,9 +701,9 @@ public void testListMetadataBlocks() {
         Response setMetadataBlocksResponse = UtilIT.setMetadataBlocks(dataverseAlias, Json.createArrayBuilder().add("citation").add("astrophysics"), apiToken);
         setMetadataBlocksResponse.then().assertThat().statusCode(OK.getStatusCode());
 
-        String[] testInputLevelNames = {"geographicCoverage", "country", "city"};
-        boolean[] testRequiredInputLevels = {false, true, false};
-        boolean[] testIncludedInputLevels = {false, true, true};
+        String[] testInputLevelNames = {"geographicCoverage", "country", "city", "notesText"};
+        boolean[] testRequiredInputLevels = {false, true, false, false};
+        boolean[] testIncludedInputLevels = {false, true, true, false};
         Response updateDataverseInputLevelsResponse = UtilIT.updateDataverseInputLevels(dataverseAlias, testInputLevelNames, testRequiredInputLevels, testIncludedInputLevels, apiToken);
         updateDataverseInputLevelsResponse.then().assertThat().statusCode(OK.getStatusCode());
 
@@ -774,17 +773,22 @@ public void testListMetadataBlocks() {
         // Check dataset fields for the updated input levels are retrieved
         int geospatialMetadataBlockIndex = actualMetadataBlockDisplayName1.equals("Geospatial Metadata") ? 0 : actualMetadataBlockDisplayName2.equals("Geospatial Metadata") ? 1 : 2;
 
+        // Since the included property of notesText is set to false, we should retrieve the total number of fields minus one
+        int citationMetadataBlockIndex = geospatialMetadataBlockIndex == 0 ? 1 : 0;
+        listMetadataBlocksResponse.then().assertThat()
+                .body(String.format("data[%d].fields.size()", citationMetadataBlockIndex), equalTo(78));
+
         // Since the included property of geographicCoverage is set to false, we should retrieve the total number of fields minus one
         listMetadataBlocksResponse.then().assertThat()
                 .body(String.format("data[%d].fields.size()", geospatialMetadataBlockIndex), equalTo(10));
 
-        String actualMetadataField1 = listMetadataBlocksResponse.then().extract().path(String.format("data[%d].fields.geographicCoverage.name", geospatialMetadataBlockIndex));
-        String actualMetadataField2 = listMetadataBlocksResponse.then().extract().path(String.format("data[%d].fields.country.name", geospatialMetadataBlockIndex));
-        String actualMetadataField3 = listMetadataBlocksResponse.then().extract().path(String.format("data[%d].fields.city.name", geospatialMetadataBlockIndex));
+        String actualGeospatialMetadataField1 = listMetadataBlocksResponse.then().extract().path(String.format("data[%d].fields.geographicCoverage.name", geospatialMetadataBlockIndex));
+        String actualGeospatialMetadataField2 = listMetadataBlocksResponse.then().extract().path(String.format("data[%d].fields.country.name", geospatialMetadataBlockIndex));
+        String actualGeospatialMetadataField3 = listMetadataBlocksResponse.then().extract().path(String.format("data[%d].fields.city.name", geospatialMetadataBlockIndex));
 
-        assertNull(actualMetadataField1);
-        assertNotNull(actualMetadataField2);
-        assertNotNull(actualMetadataField3);
+        assertNull(actualGeospatialMetadataField1);
+        assertNotNull(actualGeospatialMetadataField2);
+        assertNotNull(actualGeospatialMetadataField3);
 
         // Existent dataverse and onlyDisplayedOnCreate=true and returnDatasetFieldTypes=true
         listMetadataBlocksResponse = UtilIT.listMetadataBlocks(dataverseAlias, true, true, apiToken);
@@ -807,13 +811,27 @@ public void testListMetadataBlocks() {
         listMetadataBlocksResponse.then().assertThat()
                 .body(String.format("data[%d].fields.size()", geospatialMetadataBlockIndex), equalTo(1));
 
-        actualMetadataField1 = listMetadataBlocksResponse.then().extract().path(String.format("data[%d].fields.geographicCoverage.name", geospatialMetadataBlockIndex));
-        actualMetadataField2 = listMetadataBlocksResponse.then().extract().path(String.format("data[%d].fields.country.name", geospatialMetadataBlockIndex));
-        actualMetadataField3 = listMetadataBlocksResponse.then().extract().path(String.format("data[%d].fields.city.name", geospatialMetadataBlockIndex));
+        actualGeospatialMetadataField1 = listMetadataBlocksResponse.then().extract().path(String.format("data[%d].fields.geographicCoverage.name", geospatialMetadataBlockIndex));
+        actualGeospatialMetadataField2 = listMetadataBlocksResponse.then().extract().path(String.format("data[%d].fields.country.name", geospatialMetadataBlockIndex));
+        actualGeospatialMetadataField3 = listMetadataBlocksResponse.then().extract().path(String.format("data[%d].fields.city.name", geospatialMetadataBlockIndex));
+
+        assertNull(actualGeospatialMetadataField1);
+        assertNotNull(actualGeospatialMetadataField2);
+        assertNull(actualGeospatialMetadataField3);
+
+        citationMetadataBlockIndex = geospatialMetadataBlockIndex == 0 ? 1 : 0;
+
+        // notesText has displayOnCreate=true but has include=false, so should not be retrieved
+        String notesTextCitationMetadataField = listMetadataBlocksResponse.then().extract().path(String.format("data[%d].fields.notesText.name", citationMetadataBlockIndex));
+        assertNull(notesTextCitationMetadataField);
+
+        // producerName is a conditionally required field, so should not be retrieved
+        String producerNameCitationMetadataField = listMetadataBlocksResponse.then().extract().path(String.format("data[%d].fields.producerName.name", citationMetadataBlockIndex));
+        assertNull(producerNameCitationMetadataField);
 
-        assertNull(actualMetadataField1);
-        assertNotNull(actualMetadataField2);
-        assertNull(actualMetadataField3);
+        // author is a required field, so should be retrieved
+        String authorCitationMetadataField = listMetadataBlocksResponse.then().extract().path(String.format("data[%d].fields.author.name", citationMetadataBlockIndex));
+        assertNotNull(authorCitationMetadataField);
 
         // User has no permissions on the requested dataverse
         Response createSecondUserResponse = UtilIT.createRandomUser();
@@ -825,6 +843,15 @@ public void testListMetadataBlocks() {
 
         listMetadataBlocksResponse = UtilIT.listMetadataBlocks(secondDataverseAlias, true, true, apiToken);
         listMetadataBlocksResponse.then().assertThat().statusCode(UNAUTHORIZED.getStatusCode());
+
+        // List metadata blocks from Root
+        listMetadataBlocksResponse = UtilIT.listMetadataBlocks("root", true, true, apiToken);
+        listMetadataBlocksResponse.then().assertThat().statusCode(OK.getStatusCode());
+        listMetadataBlocksResponse.then().assertThat()
+                .statusCode(OK.getStatusCode())
+                .body("data[0].displayName", equalTo("Citation Metadata"))
+                .body("data[0].fields", not(equalTo(null)))
+                .body("data.size()", equalTo(1));
     }
 
     @Test
@@ -941,6 +968,13 @@ public void testUpdateInputLevels() {
                 .body("message", equalTo("Invalid dataset field type name: invalid1"))
                 .statusCode(BAD_REQUEST.getStatusCode());
 
+        // Update input levels with invalid configuration (field required but not included)
+        testIncludedInputLevels = new boolean[]{false, false};
+        updateDataverseInputLevelsResponse = UtilIT.updateDataverseInputLevels(dataverseAlias, testInputLevelNames, testRequiredInputLevels, testIncludedInputLevels, apiToken);
+        updateDataverseInputLevelsResponse.then().assertThat()
+                .body("message", equalTo(BundleUtil.getStringFromBundle("dataverse.inputlevels.error.cannotberequiredifnotincluded", List.of("geographicCoverage"))))
+                .statusCode(BAD_REQUEST.getStatusCode());
+
         // Update invalid empty input levels
         testInputLevelNames = new String[]{};
         updateDataverseInputLevelsResponse = UtilIT.updateDataverseInputLevels(dataverseAlias, testInputLevelNames, testRequiredInputLevels, testIncludedInputLevels, apiToken);
@@ -948,4 +982,197 @@ public void testUpdateInputLevels() {
                 .body("message", equalTo("Error while updating dataverse input levels: Input level list cannot be null or empty"))
                 .statusCode(INTERNAL_SERVER_ERROR.getStatusCode());
     }
+
+    @Test
+    public void testAddDataverse() {
+        Response createUser = UtilIT.createRandomUser();
+        String apiToken = UtilIT.getApiTokenFromResponse(createUser);
+        String testAliasSuffix = "-add-dataverse";
+
+        // Without optional input levels and facet ids
+        String testDataverseAlias = UtilIT.getRandomDvAlias() + testAliasSuffix;
+        Response createSubDataverseResponse = UtilIT.createSubDataverse(testDataverseAlias, null, apiToken, "root");
+        createSubDataverseResponse.then().assertThat().statusCode(CREATED.getStatusCode());
+        Response listMetadataBlocksResponse = UtilIT.listMetadataBlocks(testDataverseAlias, false, false, apiToken);
+        listMetadataBlocksResponse.then().assertThat().statusCode(OK.getStatusCode());
+        String actualMetadataBlockName = listMetadataBlocksResponse.then().extract().path("data[0].name");
+        assertEquals("citation", actualMetadataBlockName);
+
+        // With optional input levels and facet ids
+        String[] testInputLevelNames = {"geographicCoverage", "country"};
+        String[] testFacetIds = {"authorName", "authorAffiliation"};
+        String[] testMetadataBlockNames = {"citation", "geospatial"};
+        testDataverseAlias = UtilIT.getRandomDvAlias() + testAliasSuffix;
+        createSubDataverseResponse = UtilIT.createSubDataverse(testDataverseAlias, null, apiToken, "root", testInputLevelNames, testFacetIds, testMetadataBlockNames);
+        createSubDataverseResponse.then().assertThat().statusCode(CREATED.getStatusCode());
+
+        // Assert facets are configured
+        Response listDataverseFacetsResponse = UtilIT.listDataverseFacets(testDataverseAlias, apiToken);
+        String actualFacetName1 = listDataverseFacetsResponse.then().extract().path("data[0]");
+        String actualFacetName2 = listDataverseFacetsResponse.then().extract().path("data[1]");
+        assertNotEquals(actualFacetName1, actualFacetName2);
+        assertThat(testFacetIds, hasItemInArray(actualFacetName1));
+        assertThat(testFacetIds, hasItemInArray(actualFacetName2));
+
+        // Assert input levels are configured
+        Response listDataverseInputLevelsResponse = UtilIT.listDataverseInputLevels(testDataverseAlias, apiToken);
+        String actualInputLevelName1 = listDataverseInputLevelsResponse.then().extract().path("data[0].datasetFieldTypeName");
+        String actualInputLevelName2 = listDataverseInputLevelsResponse.then().extract().path("data[1].datasetFieldTypeName");
+        assertNotEquals(actualInputLevelName1, actualInputLevelName2);
+        assertThat(testInputLevelNames, hasItemInArray(actualInputLevelName1));
+        assertThat(testInputLevelNames, hasItemInArray(actualInputLevelName2));
+
+        // Assert metadata blocks are configured
+        listMetadataBlocksResponse = UtilIT.listMetadataBlocks(testDataverseAlias, false, false, apiToken);
+        listMetadataBlocksResponse.then().assertThat().statusCode(OK.getStatusCode());
+        String actualMetadataBlockName1 = listMetadataBlocksResponse.then().extract().path("data[0].name");
+        String actualMetadataBlockName2 = listMetadataBlocksResponse.then().extract().path("data[1].name");
+        assertNotEquals(actualMetadataBlockName1, actualMetadataBlockName2);
+        assertThat(testMetadataBlockNames, hasItemInArray(actualMetadataBlockName1));
+        assertThat(testMetadataBlockNames, hasItemInArray(actualMetadataBlockName2));
+
+        // Setting metadata blocks without citation
+        testDataverseAlias = UtilIT.getRandomDvAlias() + testAliasSuffix;
+        String[] testMetadataBlockNamesWithoutCitation = {"geospatial"};
+        createSubDataverseResponse = UtilIT.createSubDataverse(testDataverseAlias, null, apiToken, "root", null, null, testMetadataBlockNamesWithoutCitation);
+        createSubDataverseResponse.then().assertThat().statusCode(CREATED.getStatusCode());
+
+        // Assert metadata blocks including citation are configured
+        listMetadataBlocksResponse = UtilIT.listMetadataBlocks(testDataverseAlias, false, false, apiToken);
+        listMetadataBlocksResponse.then().assertThat().statusCode(OK.getStatusCode());
+        String[] testExpectedBlockNames = {"citation", "geospatial"};
+        actualMetadataBlockName1 = listMetadataBlocksResponse.then().extract().path("data[0].name");
+        actualMetadataBlockName2 = listMetadataBlocksResponse.then().extract().path("data[1].name");
+        assertNotEquals(actualMetadataBlockName1, actualMetadataBlockName2);
+        assertThat(testExpectedBlockNames, hasItemInArray(actualMetadataBlockName1));
+        assertThat(testExpectedBlockNames, hasItemInArray(actualMetadataBlockName2));
+
+        // Should return error when an invalid facet id is sent
+        String invalidFacetId = "invalidFacetId";
+        String[] testInvalidFacetIds = {"authorName", invalidFacetId};
+        testDataverseAlias = UtilIT.getRandomDvAlias() + testAliasSuffix;
+        createSubDataverseResponse = UtilIT.createSubDataverse(testDataverseAlias, null, apiToken, "root", testInputLevelNames, testInvalidFacetIds, testMetadataBlockNames);
+        createSubDataverseResponse.then().assertThat()
+                .statusCode(BAD_REQUEST.getStatusCode())
+                .body("message", equalTo("Cant find dataset field type \"" + invalidFacetId + "\""));
+
+        // Should return error when an invalid input level is sent
+        String invalidInputLevelName = "wrongInputLevel";
+        String[] testInvalidInputLevelNames = {"geographicCoverage", invalidInputLevelName};
+        testDataverseAlias = UtilIT.getRandomDvAlias() + testAliasSuffix;
+        createSubDataverseResponse = UtilIT.createSubDataverse(testDataverseAlias, null, apiToken, "root", testInvalidInputLevelNames, testFacetIds, testMetadataBlockNames);
+        createSubDataverseResponse.then().assertThat()
+                .statusCode(BAD_REQUEST.getStatusCode())
+                .body("message", equalTo("Invalid dataset field type name: " + invalidInputLevelName));
+
+        // Should return error when an invalid metadata block name is sent
+        String invalidMetadataBlockName = "invalidMetadataBlockName";
+        String[] testInvalidMetadataBlockNames = {"citation", invalidMetadataBlockName};
+        testDataverseAlias = UtilIT.getRandomDvAlias() + testAliasSuffix;
+        createSubDataverseResponse = UtilIT.createSubDataverse(testDataverseAlias, null, apiToken, "root", testInputLevelNames, testInvalidFacetIds, testInvalidMetadataBlockNames);
+        createSubDataverseResponse.then().assertThat()
+                .statusCode(BAD_REQUEST.getStatusCode())
+                .body("message", equalTo("Invalid metadata block name: \"" + invalidMetadataBlockName + "\""));
+    }
+
+    @Test
+    public void testListFacets() {
+        Response createUserResponse = UtilIT.createRandomUser();
+        String apiToken = UtilIT.getApiTokenFromResponse(createUserResponse);
+        Response createDataverseResponse = UtilIT.createRandomDataverse(apiToken);
+        createDataverseResponse.then().assertThat().statusCode(CREATED.getStatusCode());
+        String dataverseAlias = UtilIT.getAliasFromResponse(createDataverseResponse);
+
+        String[] expectedFacetNames = {"authorName", "subject", "keywordValue", "dateOfDeposit"};
+
+        // returnDetails is false
+        Response listFacetsResponse = UtilIT.listDataverseFacets(dataverseAlias, false, apiToken);
+        listFacetsResponse.then().assertThat().statusCode(OK.getStatusCode());
+        String actualFacetName = listFacetsResponse.then().extract().path("data[0]");
+        assertThat(expectedFacetNames, hasItemInArray(actualFacetName));
+
+        // returnDetails is true
+        String[] expectedDisplayNames = {"Author Name", "Subject", "Keyword Term", "Deposit Date"};
+        listFacetsResponse = UtilIT.listDataverseFacets(dataverseAlias, true, apiToken);
+        listFacetsResponse.then().assertThat().statusCode(OK.getStatusCode());
+        actualFacetName = listFacetsResponse.then().extract().path("data[0].name");
+        assertThat(expectedFacetNames, hasItemInArray(actualFacetName));
+        String actualDisplayName = listFacetsResponse.then().extract().path("data[0].displayName");
+        assertThat(expectedDisplayNames, hasItemInArray(actualDisplayName));
+        String actualId = listFacetsResponse.then().extract().path("data[0].id");
+        assertNotNull(actualId);
+
+        // Dataverse with custom facets
+        String dataverseWithCustomFacetsAlias = UtilIT.getRandomDvAlias() + "customFacets";
+
+        String[] testFacetNames = {"authorName", "authorAffiliation"};
+        String[] testMetadataBlockNames = {"citation", "geospatial"};
+
+        Response createSubDataverseResponse = UtilIT.createSubDataverse(dataverseWithCustomFacetsAlias, null, apiToken, "root", null, testFacetNames, testMetadataBlockNames);
+        createSubDataverseResponse.then().assertThat().statusCode(CREATED.getStatusCode());
+
+        listFacetsResponse = UtilIT.listDataverseFacets(dataverseWithCustomFacetsAlias, true, apiToken);
+        listFacetsResponse.then().assertThat().statusCode(OK.getStatusCode());
+
+        String actualFacetName1 = listFacetsResponse.then().extract().path("data[0].name");
+        String actualFacetName2 = listFacetsResponse.then().extract().path("data[1].name");
+        assertNotEquals(actualFacetName1, actualFacetName2);
+        assertThat(testFacetNames, hasItemInArray(actualFacetName1));
+        assertThat(testFacetNames, hasItemInArray(actualFacetName2));
+
+        String[] testFacetExpectedDisplayNames = {"Author Name", "Author Affiliation"};
+        String actualFacetDisplayName1 = listFacetsResponse.then().extract().path("data[0].displayName");
+        String actualFacetDisplayName2 = listFacetsResponse.then().extract().path("data[1].displayName");
+        assertNotEquals(actualFacetDisplayName1, actualFacetDisplayName2);
+        assertThat(testFacetExpectedDisplayNames, hasItemInArray(actualFacetDisplayName1));
+        assertThat(testFacetExpectedDisplayNames, hasItemInArray(actualFacetDisplayName2));
+    }
+
+    @Test
+    public void testGetUserPermissionsOnDataverse() {
+        Response createUserResponse = UtilIT.createRandomUser();
+        String apiToken = UtilIT.getApiTokenFromResponse(createUserResponse);
+
+        Response createDataverseResponse = UtilIT.createRandomDataverse(apiToken);
+        createDataverseResponse.then().assertThat().statusCode(CREATED.getStatusCode());
+        String dataverseAlias = UtilIT.getAliasFromResponse(createDataverseResponse);
+
+        // Call for dataverse created by the user
+        Response getUserPermissionsOnDataverseResponse = UtilIT.getUserPermissionsOnDataverse(dataverseAlias, apiToken);
+        getUserPermissionsOnDataverseResponse.then().assertThat().statusCode(OK.getStatusCode());
+        boolean canAddDataverse = JsonPath.from(getUserPermissionsOnDataverseResponse.body().asString()).getBoolean("data.canAddDataverse");
+        assertTrue(canAddDataverse);
+        boolean canAddDataset = JsonPath.from(getUserPermissionsOnDataverseResponse.body().asString()).getBoolean("data.canAddDataset");
+        assertTrue(canAddDataset);
+        boolean canViewUnpublishedDataverse = JsonPath.from(getUserPermissionsOnDataverseResponse.body().asString()).getBoolean("data.canViewUnpublishedDataverse");
+        assertTrue(canViewUnpublishedDataverse);
+        boolean canEditDataverse = JsonPath.from(getUserPermissionsOnDataverseResponse.body().asString()).getBoolean("data.canEditDataverse");
+        assertTrue(canEditDataverse);
+        boolean canManageDataversePermissions = JsonPath.from(getUserPermissionsOnDataverseResponse.body().asString()).getBoolean("data.canManageDataversePermissions");
+        assertTrue(canManageDataversePermissions);
+        boolean canPublishDataverse = JsonPath.from(getUserPermissionsOnDataverseResponse.body().asString()).getBoolean("data.canPublishDataverse");
+        assertTrue(canPublishDataverse);
+        boolean canDeleteDataverse = JsonPath.from(getUserPermissionsOnDataverseResponse.body().asString()).getBoolean("data.canDeleteDataverse");
+        assertTrue(canDeleteDataverse);
+
+        // Call for root dataverse
+        getUserPermissionsOnDataverseResponse = UtilIT.getUserPermissionsOnDataverse("root", apiToken);
+        getUserPermissionsOnDataverseResponse.then().assertThat().statusCode(OK.getStatusCode());
+        canAddDataverse = JsonPath.from(getUserPermissionsOnDataverseResponse.body().asString()).getBoolean("data.canAddDataverse");
+        assertTrue(canAddDataverse);
+        canAddDataset = JsonPath.from(getUserPermissionsOnDataverseResponse.body().asString()).getBoolean("data.canAddDataset");
+        assertTrue(canAddDataset);
+        canViewUnpublishedDataverse = JsonPath.from(getUserPermissionsOnDataverseResponse.body().asString()).getBoolean("data.canViewUnpublishedDataverse");
+        assertFalse(canViewUnpublishedDataverse);
+        canEditDataverse = JsonPath.from(getUserPermissionsOnDataverseResponse.body().asString()).getBoolean("data.canEditDataverse");
+        assertFalse(canEditDataverse);
+        canManageDataversePermissions = JsonPath.from(getUserPermissionsOnDataverseResponse.body().asString()).getBoolean("data.canManageDataversePermissions");
+        assertFalse(canManageDataversePermissions);
+        canPublishDataverse = JsonPath.from(getUserPermissionsOnDataverseResponse.body().asString()).getBoolean("data.canPublishDataverse");
+        assertFalse(canPublishDataverse);
+        canDeleteDataverse = JsonPath.from(getUserPermissionsOnDataverseResponse.body().asString()).getBoolean("data.canDeleteDataverse");
+        assertFalse(canDeleteDataverse);
+
+        // Call with invalid dataverse alias
+        Response getUserPermissionsOnDataverseInvalidIdResponse = UtilIT.getUserPermissionsOnDataverse("testInvalidAlias", apiToken);
+        getUserPermissionsOnDataverseInvalidIdResponse.then().assertThat().statusCode(NOT_FOUND.getStatusCode());
+    }
 }
diff --git a/src/test/java/edu/harvard/iq/dataverse/api/MetadataBlocksIT.java b/src/test/java/edu/harvard/iq/dataverse/api/MetadataBlocksIT.java
index 5f5a7fbc0f8..183d687b405 100644
--- a/src/test/java/edu/harvard/iq/dataverse/api/MetadataBlocksIT.java
+++ b/src/test/java/edu/harvard/iq/dataverse/api/MetadataBlocksIT.java
@@ -42,16 +42,20 @@ void testListMetadataBlocks() {
 
         // returnDatasetFieldTypes=true
         listMetadataBlocksResponse = UtilIT.listMetadataBlocks(false, true);
+        int expectedNumberOfMetadataFields = 79;
         listMetadataBlocksResponse.then().assertThat()
                 .statusCode(OK.getStatusCode())
                 .body("data[0].fields", not(equalTo(null)))
+                .body("data[0].fields.size()", equalTo(expectedNumberOfMetadataFields))
                 .body("data.size()", equalTo(expectedDefaultNumberOfMetadataBlocks));
 
         // onlyDisplayedOnCreate=true and returnDatasetFieldTypes=true
         listMetadataBlocksResponse = UtilIT.listMetadataBlocks(true, true);
+        expectedNumberOfMetadataFields = 25;
         listMetadataBlocksResponse.then().assertThat()
                 .statusCode(OK.getStatusCode())
                 .body("data[0].fields", not(equalTo(null)))
+                .body("data[0].fields.size()", equalTo(expectedNumberOfMetadataFields))
                 .body("data[0].displayName", equalTo("Citation Metadata"))
                 .body("data.size()", equalTo(expectedOnlyDisplayedOnCreateNumberOfMetadataBlocks));
     }
@@ -67,7 +71,7 @@ void testGetMetadataBlock() {
                 .body("data.fields.title.typeClass", CoreMatchers.is("primitive"))
                 .body("data.fields.title.isRequired", CoreMatchers.is(true));
     }
-    
+
     @Test
     void testDatasetWithAllDefaultMetadata() {
         // given
diff --git a/src/test/java/edu/harvard/iq/dataverse/api/SavedSearchIT.java b/src/test/java/edu/harvard/iq/dataverse/api/SavedSearchIT.java
new file mode 100644
index 00000000000..90357596c25
--- /dev/null
+++ b/src/test/java/edu/harvard/iq/dataverse/api/SavedSearchIT.java
@@ -0,0 +1,200 @@
+package edu.harvard.iq.dataverse.api;
+
+import io.restassured.RestAssured;
+import io.restassured.path.json.JsonPath;
+import io.restassured.response.Response;
+import jakarta.json.Json;
+import jakarta.json.JsonArrayBuilder;
+import jakarta.json.JsonObjectBuilder;
+import org.junit.jupiter.api.AfterAll;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.Test;
+
+import java.util.List;
+
+import static jakarta.ws.rs.core.Response.Status.BAD_REQUEST;
+import static jakarta.ws.rs.core.Response.Status.INTERNAL_SERVER_ERROR;
+import static jakarta.ws.rs.core.Response.Status.NOT_FOUND;
+import static jakarta.ws.rs.core.Response.Status.OK;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+
+public class SavedSearchIT {
+
+    @BeforeAll
+    public static void setUpClass() {
+
+    }
+
+    @AfterAll
+    public static void afterClass() {
+
+    }
+
+    @Test
+    public void testSavedSearches() {
+
+        Response createAdminUser = UtilIT.createRandomUser();
+        String adminUsername = UtilIT.getUsernameFromResponse(createAdminUser);
+        String adminApiToken = UtilIT.getApiTokenFromResponse(createAdminUser);
+        UtilIT.makeSuperUser(adminUsername);
+
+        Response createDataverseResponse = UtilIT.createRandomDataverse(adminApiToken);
+        createDataverseResponse.prettyPrint();
+        String dataverseAlias = UtilIT.getAliasFromResponse(createDataverseResponse);
+        Integer dataverseId = UtilIT.getDataverseIdFromResponse(createDataverseResponse);
+
+        //dataset-finch1-nolicense.json
+        Response createDatasetResponse1 = UtilIT.createRandomDatasetViaNativeApi(dataverseAlias, adminApiToken);
+        createDatasetResponse1.prettyPrint();
+        Integer datasetId = UtilIT.getDatasetIdFromResponse(createDatasetResponse1);
+
+        Response createDatasetResponse2 = UtilIT.createRandomDatasetViaNativeApi(dataverseAlias, adminApiToken);
+        createDatasetResponse2.prettyPrint();
+        Integer datasetId2 = UtilIT.getDatasetIdFromResponse(createDatasetResponse2);
+
+        // missing body
+        Response resp = RestAssured.given()
+                .contentType("application/json")
+                .post("/api/admin/savedsearches");
+        resp.prettyPrint();
+        resp.then().assertThat()
+                .statusCode(INTERNAL_SERVER_ERROR.getStatusCode());
+
+        // creatorId null
+        resp = RestAssured.given()
+                .body(createSavedSearchJson("*", null, dataverseId, "subject_ss:Medicine, Health and Life Sciences"))
+                .contentType("application/json")
+                .post("/api/admin/savedsearches");
+        resp.prettyPrint();
+        resp.then().assertThat()
+                .statusCode(BAD_REQUEST.getStatusCode());
+
+        // creatorId string
+        resp = RestAssured.given()
+                .body(createSavedSearchJson("*", "1", dataverseId.toString(), "subject_ss:Medicine, Health and Life Sciences"))
+                .contentType("application/json")
+                .post("/api/admin/savedsearches");
+        resp.prettyPrint();
+        resp.then().assertThat()
+                .statusCode(BAD_REQUEST.getStatusCode());
+
+        // creatorId not found
+        resp = RestAssured.given()
+                .body(createSavedSearchJson("*", 9999, dataverseId, "subject_ss:Medicine, Health and Life Sciences"))
+                .contentType("application/json")
+                .post("/api/admin/savedsearches");
+        resp.prettyPrint();
+        resp.then().assertThat()
+                .statusCode(NOT_FOUND.getStatusCode());
+
+        // definitionPointId null
+        resp = RestAssured.given()
+                .body(createSavedSearchJson("*", 1, null, "subject_ss:Medicine, Health and Life Sciences"))
+                .contentType("application/json")
+                .post("/api/admin/savedsearches");
+        resp.prettyPrint();
+        resp.then().assertThat()
+                .statusCode(BAD_REQUEST.getStatusCode());
+
+        // definitionPointId string
+        resp = RestAssured.given()
+                .body(createSavedSearchJson("*", "1", "9999", "subject_ss:Medicine, Health and Life Sciences"))
+                .contentType("application/json")
+                .post("/api/admin/savedsearches");
+        resp.prettyPrint();
+        resp.then().assertThat()
+                .statusCode(BAD_REQUEST.getStatusCode());
+
+        // definitionPointId not found
+        resp = RestAssured.given()
+                .body(createSavedSearchJson("*", 1, 9999, "subject_ss:Medicine, Health and Life Sciences"))
+                .contentType("application/json")
+                .post("/api/admin/savedsearches");
+        resp.prettyPrint();
+        resp.then().assertThat()
+                .statusCode(NOT_FOUND.getStatusCode());
+
+        // missing filter
+        resp = RestAssured.given()
+                .body(createSavedSearchJson("*", 1, dataverseId))
+                .contentType("application/json")
+                .post("/api/admin/savedsearches");
+        resp.prettyPrint();
+        resp.then().assertThat()
+                .statusCode(OK.getStatusCode());
+
+        // create a saved search as superuser : OK
+        resp = RestAssured.given()
+                .body(createSavedSearchJson("*", 1, dataverseId, "subject_ss:Medicine, Health and Life Sciences"))
+                .contentType("application/json")
+                .post("/api/admin/savedsearches");
+        resp.prettyPrint();
+        resp.then().assertThat()
+                .statusCode(OK.getStatusCode());
+
+        JsonPath path = JsonPath.from(resp.body().asString());
+        Integer createdSavedSearchId = path.getInt("data.id");
+
+        // get list as non superuser : OK
+        Response getListResponse = RestAssured.given()
+                .get("/api/admin/savedsearches/list");
+        getListResponse.prettyPrint();
+        getListResponse.then().assertThat()
+                .statusCode(OK.getStatusCode());
+
+        JsonPath path2 = JsonPath.from(getListResponse.body().asString());
+        List<Object> listBeforeDelete = path2.getList("data.savedSearches");
+
+        // makelinks/all as non superuser : OK
+        Response makelinksAll = RestAssured.given()
+                .put("/api/admin/savedsearches/makelinks/all");
+        makelinksAll.prettyPrint();
+        makelinksAll.then().assertThat()
+                .statusCode(OK.getStatusCode());
+
+        // delete a saved search as non superuser : OK
+        Response deleteResponse = RestAssured.given()
+                .delete("/api/admin/savedsearches/" + createdSavedSearchId);
+        deleteResponse.prettyPrint();
+        deleteResponse.then().assertThat()
+                .statusCode(OK.getStatusCode());
+
+        // check that the list count decreased by one
+        getListResponse = RestAssured.given()
+                .get("/api/admin/savedsearches/list");
+        getListResponse.prettyPrint();
+        JsonPath path3 = JsonPath.from(getListResponse.body().asString());
+        List<Object> listAfterDelete = path3.getList("data.savedSearches");
+        assertEquals(listBeforeDelete.size() - 1, listAfterDelete.size());
+    }
+
+    public String createSavedSearchJson(String query, Integer creatorId, Integer definitionPointId, String... filterQueries) {
+
+        JsonArrayBuilder arr = Json.createArrayBuilder();
+        for (String filterQuery : filterQueries) {
+            arr.add(filterQuery);
+        }
+
+        JsonObjectBuilder json = Json.createObjectBuilder();
+        if (query != null) json.add("query", query);
+        if (creatorId != null) json.add("creatorId", creatorId);
+        if (definitionPointId != null) json.add("definitionPointId", definitionPointId);
+        if (filterQueries.length > 0) json.add("filterQueries", arr);
+        return json.build().toString();
+    }
+
+    public String createSavedSearchJson(String query, String creatorId, String definitionPointId, String... filterQueries) {
+
+        JsonArrayBuilder arr = Json.createArrayBuilder();
+        for (String filterQuery : filterQueries) {
+            arr.add(filterQuery);
+        }
+
+        JsonObjectBuilder json = Json.createObjectBuilder();
+        if (query != null) json.add("query", query);
+        if (creatorId != null) json.add("creatorId", creatorId);
+        if (definitionPointId != null) json.add("definitionPointId", definitionPointId);
+        if (filterQueries.length > 0) json.add("filterQueries", arr);
+        return json.build().toString();
+    }
+}
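For reference, the createSavedSearchJson helpers above simply assemble a flat JSON object for POST /api/admin/savedsearches. Below is a minimal, self-contained sketch of the equivalent payload; the definitionPointId of 42 is a hypothetical value, not one taken from the test.

    // Standalone sketch; assumes only the jakarta.json API already used in the test.
    import jakarta.json.Json;

    public class SavedSearchPayloadSketch {
        public static void main(String[] args) {
            String body = Json.createObjectBuilder()
                    .add("query", "*")
                    .add("creatorId", 1)
                    .add("definitionPointId", 42) // hypothetical dataverse id
                    .add("filterQueries", Json.createArrayBuilder()
                            .add("subject_ss:Medicine, Health and Life Sciences"))
                    .build()
                    .toString();
            // Prints: {"query":"*","creatorId":1,"definitionPointId":42,
            //          "filterQueries":["subject_ss:Medicine, Health and Life Sciences"]}
            System.out.println(body);
        }
    }
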
diff --git a/src/test/java/edu/harvard/iq/dataverse/api/SearchIT.java b/src/test/java/edu/harvard/iq/dataverse/api/SearchIT.java
index 6e4fd5b0bb3..9fa13bb2939 100644
--- a/src/test/java/edu/harvard/iq/dataverse/api/SearchIT.java
+++ b/src/test/java/edu/harvard/iq/dataverse/api/SearchIT.java
@@ -25,7 +25,6 @@
 import edu.harvard.iq.dataverse.dataaccess.ImageThumbConverter;
 import java.awt.image.BufferedImage;
 import java.io.IOException;
-import static java.lang.Thread.sleep;
 import javax.imageio.ImageIO;
 import static jakarta.ws.rs.core.Response.Status.CREATED;
 import static jakarta.ws.rs.core.Response.Status.NOT_FOUND;
@@ -113,6 +112,7 @@ public void testSearchPermisions() throws InterruptedException {
                 .body("data.total_count", CoreMatchers.is(1))
                 .body("data.count_in_response", CoreMatchers.is(1))
                 .body("data.items[0].name", CoreMatchers.is("Darwin's Finches"))
+                .body("data.items[0].publicationStatuses", CoreMatchers.hasItems("Unpublished", "Draft"))
                 .statusCode(OK.getStatusCode());
 
         Response publishDataverse = UtilIT.publishDataverseViaSword(dataverseAlias, apiToken1);
@@ -599,7 +599,7 @@ public void testDatasetThumbnail() {
 
         String datasetLogo = "src/main/webapp/resources/images/cc0.png";
         File datasetLogoFile = new File(datasetLogo);
-        String datasetLogoAsBase64 = datasetLogoAsBase64 = ImageThumbConverter.generateImageThumbnailFromFileAsBase64(datasetLogoFile, ImageThumbConverter.DEFAULT_CARDIMAGE_SIZE);
+        String datasetLogoAsBase64 = ImageThumbConverter.generateImageThumbnailFromFileAsBase64(datasetLogoFile, ImageThumbConverter.DEFAULT_CARDIMAGE_SIZE);
 
         if (datasetLogoAsBase64 == null) {
             Logger.getLogger(SearchIT.class.getName()).log(Level.SEVERE, "Failed to generate a base64 thumbnail from the file dataverseproject.png");
diff --git a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java
index 0216859b869..8f1fcdf57eb 100644
--- a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java
+++ b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java
@@ -358,20 +358,57 @@ public static Response getServiceDocument(String apiToken) {
     static Response createDataverse(String alias, String category, String apiToken) {
         return createSubDataverse(alias, category, apiToken, ":root");
     }
-    
+
     static Response createSubDataverse(String alias, String category, String apiToken, String parentDV) {
+        return createSubDataverse(alias, category, apiToken, parentDV, null, null, null);
+    }
+
+    static Response createSubDataverse(String alias, String category, String apiToken, String parentDV, String[] inputLevelNames, String[] facetIds, String[] metadataBlockNames) {
         JsonArrayBuilder contactArrayBuilder = Json.createArrayBuilder();
         contactArrayBuilder.add(Json.createObjectBuilder().add("contactEmail", getEmailFromUserName(getRandomIdentifier())));
         JsonArrayBuilder subjectArrayBuilder = Json.createArrayBuilder();
         subjectArrayBuilder.add("Other");
-        JsonObject dvData = Json.createObjectBuilder()
+        JsonObjectBuilder objectBuilder = Json.createObjectBuilder()
                 .add("alias", alias)
                 .add("name", alias)
                 .add("dataverseContacts", contactArrayBuilder)
                 .add("dataverseSubjects", subjectArrayBuilder)
                 // don't send "dataverseType" if category is null, must be a better way
-                .add(category != null ? "dataverseType" : "notTheKeyDataverseType", category != null ? category : "whatever")
-                .build();
+                .add(category != null ? "dataverseType" : "notTheKeyDataverseType", category != null ? category : "whatever");
+
+        JsonObjectBuilder metadataBlocksObjectBuilder = Json.createObjectBuilder();
+
+        if (inputLevelNames != null) {
+            JsonArrayBuilder inputLevelsArrayBuilder = Json.createArrayBuilder();
+            for (String inputLevelName : inputLevelNames) {
+                inputLevelsArrayBuilder.add(Json.createObjectBuilder()
+                        .add("datasetFieldTypeName", inputLevelName)
+                        .add("required", true)
+                        .add("include", true)
+                );
+            }
+            metadataBlocksObjectBuilder.add("inputLevels", inputLevelsArrayBuilder);
+        }
+
+        if (metadataBlockNames != null) {
+            JsonArrayBuilder metadataBlockNamesArrayBuilder = Json.createArrayBuilder();
+            for (String metadataBlockName : metadataBlockNames) {
+                metadataBlockNamesArrayBuilder.add(metadataBlockName);
+            }
+            metadataBlocksObjectBuilder.add("metadataBlockNames", metadataBlockNamesArrayBuilder);
+        }
+
+        objectBuilder.add("metadataBlocks", metadataBlocksObjectBuilder);
+
+        if (facetIds != null) {
+            JsonArrayBuilder facetIdsArrayBuilder = Json.createArrayBuilder();
+            for (String facetId : facetIds) {
+                facetIdsArrayBuilder.add(facetId);
+            }
+            objectBuilder.add("facetIds", facetIdsArrayBuilder);
+        }
+
+        JsonObject dvData = objectBuilder.build();
         Response createDataverseResponse = given()
                 .body(dvData.toString()).contentType(ContentType.JSON)
                 .when().post("/api/dataverses/" + parentDV + "?key=" + apiToken);
@@ -3739,6 +3776,12 @@ static Response getUserPermissionsOnDataset(String datasetId, String apiToken) {
                 .get("/api/datasets/" + datasetId + "/userPermissions");
     }
 
+    static Response getUserPermissionsOnDataverse(String dataverseAlias, String apiToken) {
+        return given()
+                .header(API_TOKEN_HTTP_HEADER, apiToken)
+                .get("/api/dataverses/" + dataverseAlias + "/userPermissions");
+    }
+
     static Response getCanDownloadAtLeastOneFile(String datasetId, String versionId, String apiToken) {
         return given()
                 .header(API_TOKEN_HTTP_HEADER, apiToken)
@@ -3986,4 +4029,27 @@ public static Response getOpenAPI(String accept, String format) {
                 .get("/openapi");
         return response;
     }
+
+    static Response listDataverseFacets(String dataverseAlias, String apiToken) {
+        return listDataverseFacets(dataverseAlias, false, apiToken);
+    }
+
+    static Response listDataverseFacets(String dataverseAlias, boolean returnDetails, String apiToken) {
+        return given()
+                .header(API_TOKEN_HTTP_HEADER, apiToken)
+                .queryParam("returnDetails", returnDetails)
+                .get("/api/dataverses/" + dataverseAlias + "/facets");
+    }
+
+    static Response listDataverseInputLevels(String dataverseAlias, String apiToken) {
+        return given()
+                .header(API_TOKEN_HTTP_HEADER, apiToken)
+                .contentType("application/json")
+                .get("/api/dataverses/" + dataverseAlias + "/inputLevels");
+    }
+
+    static Response listAllFacetableDatasetFields() {
+        return given()
+                .get("/api/datasetfields/facetables");
+    }
 }
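For orientation, here is a small standalone sketch of the request body that the extended createSubDataverse helper above assembles when facet IDs and metadata block names are supplied; the alias and contact email are hypothetical placeholders (the helper normally generates a random contact address).

    import jakarta.json.Json;
    import jakarta.json.JsonObject;

    public class CreateSubDataverseBodySketch {
        public static void main(String[] args) {
            JsonObject body = Json.createObjectBuilder()
                    .add("alias", "customFacetsExample")   // hypothetical alias
                    .add("name", "customFacetsExample")
                    .add("dataverseContacts", Json.createArrayBuilder()
                            .add(Json.createObjectBuilder().add("contactEmail", "owner@example.com")))
                    .add("dataverseSubjects", Json.createArrayBuilder().add("Other"))
                    .add("metadataBlocks", Json.createObjectBuilder()
                            .add("metadataBlockNames", Json.createArrayBuilder()
                                    .add("citation").add("geospatial")))
                    .add("facetIds", Json.createArrayBuilder()
                            .add("authorName").add("authorAffiliation"))
                    .build();
            // The helper POSTs a body like this to /api/dataverses/{parentDV}?key={apiToken}.
            System.out.println(body);
        }
    }
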
diff --git a/src/test/java/edu/harvard/iq/dataverse/dataaccess/RemoteOverlayAccessIOTest.java b/src/test/java/edu/harvard/iq/dataverse/dataaccess/RemoteOverlayAccessIOTest.java
index 2c0e0a5c6b7..c57fa71a340 100644
--- a/src/test/java/edu/harvard/iq/dataverse/dataaccess/RemoteOverlayAccessIOTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/dataaccess/RemoteOverlayAccessIOTest.java
@@ -41,7 +41,7 @@ public class RemoteOverlayAccessIOTest {
     public void setUp() {
         System.setProperty("dataverse.files.test.type", "remote");
         System.setProperty("dataverse.files.test.label", "testOverlay");
-        System.setProperty("dataverse.files.test.base-url", "https://demo.dataverse.org/resources");
+        System.setProperty("dataverse.files.test.base-url", "https://data.qdr.syr.edu/resources");
         System.setProperty("dataverse.files.test.base-store", "file");
         System.setProperty("dataverse.files.test.download-redirect", "true");
         System.setProperty("dataverse.files.test.remote-store-name", "DemoDataCorp");
diff --git a/src/test/java/edu/harvard/iq/dataverse/engine/command/impl/MoveDatasetCommandTest.java b/src/test/java/edu/harvard/iq/dataverse/engine/command/impl/MoveDatasetCommandTest.java
index ed6112539ed..380a4bbcf18 100644
--- a/src/test/java/edu/harvard/iq/dataverse/engine/command/impl/MoveDatasetCommandTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/engine/command/impl/MoveDatasetCommandTest.java
@@ -6,7 +6,10 @@
 package edu.harvard.iq.dataverse.engine.command.impl;
 
 import edu.harvard.iq.dataverse.Dataset;
+import edu.harvard.iq.dataverse.DatasetLock;
+import edu.harvard.iq.dataverse.DatasetServiceBean;
 import edu.harvard.iq.dataverse.Dataverse;
+import edu.harvard.iq.dataverse.DataverseRoleServiceBean;
 import edu.harvard.iq.dataverse.DataverseServiceBean;
 import edu.harvard.iq.dataverse.DvObject;
 import edu.harvard.iq.dataverse.Guestbook;
@@ -14,6 +17,8 @@
 import edu.harvard.iq.dataverse.GuestbookServiceBean;
 import edu.harvard.iq.dataverse.MetadataBlock;
 import edu.harvard.iq.dataverse.PermissionServiceBean;
+import edu.harvard.iq.dataverse.RoleAssignment;
+import edu.harvard.iq.dataverse.authorization.DataverseRole;
 import edu.harvard.iq.dataverse.authorization.RoleAssignee;
 import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser;
 import edu.harvard.iq.dataverse.authorization.users.GuestUser;
@@ -25,6 +30,8 @@
 import edu.harvard.iq.dataverse.engine.command.exception.IllegalCommandException;
 import edu.harvard.iq.dataverse.engine.command.exception.PermissionException;
 import static edu.harvard.iq.dataverse.mocks.MocksFactory.makeAuthenticatedUser;
+import static edu.harvard.iq.dataverse.mocks.MocksFactory.makeRole;
+import static edu.harvard.iq.dataverse.mocks.MocksFactory.nextId;
 import edu.harvard.iq.dataverse.search.IndexServiceBean;
 import java.sql.Timestamp;
 import java.util.ArrayList;
@@ -48,6 +55,9 @@
 import jakarta.persistence.metamodel.Metamodel;
 import jakarta.servlet.http.HttpServletRequest;
 import jakarta.ws.rs.core.Context;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.LinkedList;
 import static org.junit.jupiter.api.Assertions.assertEquals;
 import static org.junit.jupiter.api.Assertions.*;
 import static org.junit.jupiter.api.Assertions.*;
@@ -61,16 +71,17 @@
  * @author skraffmi
  */
 public class MoveDatasetCommandTest {
-        Dataset moved, movedResponses;
-    	Dataverse root, childA, childB, grandchildAA, childDraft, grandchildBB;
-	DataverseEngine testEngine;
-        MetadataBlock blockA, blockB, blockC, blockD;
-        AuthenticatedUser auth, nobody;
-        Guestbook gbA, gbB, gbC;
-        GuestbookResponse gbResp;
-        @Context
-        protected HttpServletRequest httpRequest;
-	
+
+    Dataset moved, movedResponses, movedPerms, movedSamePerms;
+    Dataverse root, childA, childB, grandchildAA, childDraft, grandchildBB, childEditor, sibEditor;
+    DataverseEngine testEngine;
+    MetadataBlock blockA, blockB, blockC, blockD;
+    AuthenticatedUser auth, nobody;
+    Guestbook gbA, gbB, gbC;
+    GuestbookResponse gbResp;
+    @Context
+    protected HttpServletRequest httpRequest;
+
     @BeforeEach
     public void setUp() {
 
@@ -79,43 +90,60 @@ public void setUp() {
         nobody = makeAuthenticatedUser("Nick", "Nobody");
         nobody.setSuperuser(false);
 
-        
-        
         root = new Dataverse();
         root.setName("root");
         root.setId(1l);
         root.setPublicationDate(new Timestamp(new Date().getTime()));
-        
+        root.setDefaultContributorRole(roles.findBuiltinRoleByAlias(DataverseRole.CURATOR));
+
         childA = new Dataverse();
         childA.setName("childA");
         childA.setId(2l);
         childA.setPublicationDate(new Timestamp(new Date().getTime()));
-        
+
         childB = new Dataverse();
         childB.setName("childB");
         childB.setId(3l);
-        childB.setPublicationDate(new Timestamp(new Date().getTime())); 
-        
+        childB.setPublicationDate(new Timestamp(new Date().getTime()));
+
         grandchildAA = new Dataverse();
         grandchildAA.setName("grandchildAA");
         grandchildAA.setId(4l);
         grandchildAA.setPublicationDate(new Timestamp(new Date().getTime()));
-        
+
         childDraft = new Dataverse();
         childDraft.setName("childDraft");
         childDraft.setId(5l);
-        
+
         grandchildBB = new Dataverse();
         grandchildBB.setName("grandchildBB");
         grandchildBB.setId(6l);
         grandchildBB.setPublicationDate(new Timestamp(new Date().getTime()));
 
-        
+        childEditor = new Dataverse();
+        childEditor.setName("childEditor");
+        childEditor.setId(7l);
+        childEditor.setDefaultContributorRole(roles.findBuiltinRoleByAlias(DataverseRole.EDITOR));
+
+        sibEditor = new Dataverse();
+        sibEditor.setName("sibEditor");
+        sibEditor.setId(8l);
+        sibEditor.setDefaultContributorRole(roles.findBuiltinRoleByAlias(DataverseRole.EDITOR));
+
+        movedPerms = new Dataset();
+        movedPerms.setOwner(childEditor);
+        DatasetLock lock = new DatasetLock(DatasetLock.Reason.InReview, nobody, null);
+        movedPerms.addLock(lock);
+
+        movedSamePerms = new Dataset();
+        movedSamePerms.setOwner(childEditor);
+        movedSamePerms.addLock(lock);
+
         moved = new Dataset();
         moved.setOwner(root);
         moved.setPublicationDate(new Timestamp(new Date().getTime()));
         moved.setId(1l);
-        
+
         movedResponses = new Dataset();
         movedResponses.setOwner(root);
         movedResponses.setPublicationDate(new Timestamp(new Date().getTime()));
@@ -126,39 +154,39 @@ public void setUp() {
         grandchildAA.setOwner(childA);
         grandchildBB.setOwner(childA);
         childDraft.setOwner(childA);
-        
-        gbA= new Guestbook();
+
+        gbA = new Guestbook();
         gbA.setId(1l);
-        gbB= new Guestbook();
+        gbB = new Guestbook();
         gbB.setId(2l);
-        gbC= new Guestbook();
+        gbC = new Guestbook();
         gbC.setId(3l);
-        
+
         moved.setGuestbook(gbA);
         movedResponses.setGuestbook(gbA);
-        
-        GuestbookResponse gbResp = new GuestbookResponse(); 
+
+        GuestbookResponse gbResp = new GuestbookResponse();
         gbResp.setGuestbook(gbA);
         gbResp.setDataset(movedResponses);
-        
+
         List<Guestbook> includeA = new ArrayList();
         includeA.add(gbA);
         includeA.add(gbB);
-        
+
         grandchildAA.setGuestbooks(includeA);
-        
+
         List<Guestbook> notIncludeA = new ArrayList();
         notIncludeA.add(gbC);
         notIncludeA.add(gbB);
-        
+
         childB.setGuestbooks(notIncludeA);
-        
-        List<Guestbook> none = new ArrayList();       
+
+        List<Guestbook> none = new ArrayList();
         root.setGuestbooks(none);
         grandchildBB.setGuestbooks(none);
         grandchildBB.setGuestbookRoot(false);
         childA.setGuestbooks(includeA);
-        
+
         testEngine = new TestDataverseEngine(new TestCommandContext() {
             @Override
             public DataverseServiceBean dataverses() {
@@ -170,31 +198,46 @@ public Dataverse save(Dataverse dataverse) {
                     }
                 };
             }
-            
+
+            @Override
+            public DatasetServiceBean datasets() {
+                return new DatasetServiceBean() {
+                    @Override
+                    public void removeDatasetLocks(Dataset dataset, DatasetLock.Reason aReason) {
+                        new HashSet<>(dataset.getLocks()).stream()
+                                .filter(l -> l.getReason() == aReason)
+                                .forEach(lock -> {
+                                    dataset.removeLock(lock);
+                                });
+
+                    }
+                };
+            }
+
             @Override
             public GuestbookServiceBean guestbooks() {
                 return new GuestbookServiceBean() {
                     @Override
                     public Long findCountResponsesForGivenDataset(Long guestbookId, Long datasetId) {
                         //We're going to fake a response for a dataset with responses
-                        if(datasetId == 1){
+                        if (datasetId == 1) {
                             return new Long(0);
-                        } else{
+                        } else {
                             return new Long(1);
                         }
                     }
                 };
             }
-            
+
             @Override
-            public IndexServiceBean index(){
-                return new IndexServiceBean(){
+            public IndexServiceBean index() {
+                return new IndexServiceBean() {
                     @Override
-                    public void asyncIndexDataset(Dataset dataset, boolean doNormalSolrDocCleanUp){
+                    public void asyncIndexDataset(Dataset dataset, boolean doNormalSolrDocCleanUp) {
                     }
                 };
             }
-            
+
             @Override
             public EntityManager em() {
                 return new MockEntityManager() {
@@ -217,14 +260,55 @@ public boolean isUserAllowedOn(RoleAssignee roleAssignee, Command<?> command, Dv
                     }
                 };
             }
-            
+
         });
     }
-	
-	/**
-	 * Moving ChildB to ChildA
-	 * @throws Exception - should not throw an exception
-	 */
+
+    DataverseRoleServiceBean roles = new DataverseRoleServiceBean() {
+
+        List<RoleAssignment> assignments = new LinkedList<>();
+
+        Map<String, DataverseRole> builtInRoles;
+
+        {
+            builtInRoles = new HashMap<>();
+            builtInRoles.put(DataverseRole.EDITOR, makeRole("default-editor", false));
+            builtInRoles.put(DataverseRole.ADMIN, makeRole("default-admin"));
+            builtInRoles.put(DataverseRole.MANAGER, makeRole("default-manager"));
+            builtInRoles.put(DataverseRole.CURATOR, makeRole("curator"));
+        }
+
+        @Override
+        public DataverseRole findBuiltinRoleByAlias(String alias) {
+            return builtInRoles.get(alias);
+        }
+
+        @Override
+        public RoleAssignment save(RoleAssignment assignment) {
+            assignment.setId(nextId());
+            assignments.add(assignment);
+            return assignment;
+        }
+
+        @Override
+        public RoleAssignment save(RoleAssignment assignment, boolean index) {
+            return save(assignment);
+        }
+
+        @Override
+        public List<RoleAssignment> directRoleAssignments(DvObject dvo) {
+            // This works because there is only one dataverse involved in the
+            // context of this unit test.
+            return assignments;
+        }
+
+    };
+
+    /**
+     * Moving ChildB to ChildA
+     *
+     * @throws Exception - should not throw an exception
+     */
     @Test
     public void testValidMove() throws Exception {
 
@@ -234,11 +318,10 @@ public void testValidMove() throws Exception {
         assertEquals(childA, moved.getOwner());
 
     }
-    
+
     /**
-	 * Moving  grandchildAA
-	 * Guestbook is not null because target includes it.
-	 */
+     * Moving grandchildAA. Guestbook is not null because the target includes it.
+     */
     @Test
     public void testKeepGuestbook() throws Exception {
 
@@ -248,12 +331,10 @@ public void testKeepGuestbook() throws Exception {
         assertNotNull(moved.getGuestbook());
 
     }
-    
-        /**
-	 * Moving to grandchildBB
-	 * Guestbook is not null because target inherits it.
-	 */
-    
+
+    /**
+     * Moving to grandchildBB. Guestbook is not null because the target inherits it.
+     */
     @Test
     public void testKeepGuestbookInherit() throws Exception {
 
@@ -263,39 +344,53 @@ public void testKeepGuestbookInherit() throws Exception {
         assertNotNull(moved.getGuestbook());
 
     }
-    
-    
+
     /**
-	 * Moving to ChildB
-	 * Guestbook is null because target does not include it
-	 */
+     * Moving to ChildB. Guestbook is null because the target does not include it.
+     */
     @Test
     public void testRemoveGuestbook() throws Exception {
 
         DataverseRequest aRequest = new DataverseRequest(auth, httpRequest);
         testEngine.submit(new MoveDatasetCommand(aRequest, moved, childB, true));
-        assertNull( moved.getGuestbook());
+        assertNull(moved.getGuestbook());
 
     }
-    	
-	
-	/**
-	 * Moving DS to its owning DV 
-        * @throws IllegalCommandException
-	 */
+
+    @Test
+    public void testMoveToDifferentPerms() throws Exception {
+        DataverseRequest aRequest = new DataverseRequest(auth, httpRequest);
+        testEngine.submit(new MoveDatasetCommand(aRequest, movedPerms, root, true));
+        assertTrue(movedPerms.getLocks().isEmpty());
+        assertTrue(movedPerms.getOwner().equals(root));
+    }
+
+    @Test
+    public void testMoveToSamePerms() throws Exception {
+        DataverseRequest aRequest = new DataverseRequest(auth, httpRequest);
+        testEngine.submit(new MoveDatasetCommand(aRequest, movedSamePerms, sibEditor, true));
+        assertTrue(movedSamePerms.getLocks().size() == 1);
+        assertTrue(movedSamePerms.getOwner().equals(sibEditor));
+    }
+
+    /**
+     * Moving DS to its owning DV
+     *
+     * @throws IllegalCommandException
+     */
     @Test
     void testInvalidMove() {
         DataverseRequest aRequest = new DataverseRequest(auth, httpRequest);
         assertThrows(IllegalCommandException.class,
-            () -> testEngine.submit(new MoveDatasetCommand(aRequest, moved, root, false)));
+                () -> testEngine.submit(new MoveDatasetCommand(aRequest, moved, root, false)));
     }
-        
+
     /**
      * Moving a dataset without having enough permission fails with
      * PermissionException.
      *
      * @throws java.lang.Exception
-     * 
+     *
      * Ignoring after permissions change in 47fb045. Did that change make this
      * case untestable? Unclear.
      */
@@ -305,7 +400,7 @@ void testAuthenticatedUserWithNoRole() {
 
         DataverseRequest aRequest = new DataverseRequest(nobody, httpRequest);
         assertThrows(IllegalCommandException.class,
-            () -> testEngine.submit(new MoveDatasetCommand(aRequest, moved, childA, null)));
+                () -> testEngine.submit(new MoveDatasetCommand(aRequest, moved, childA, null)));
     }
 
     /**
@@ -319,22 +414,22 @@ void testNotAuthenticatedUser() {
 
         DataverseRequest aRequest = new DataverseRequest(GuestUser.get(), httpRequest);
         assertThrows(PermissionException.class,
-            () -> testEngine.submit(new MoveDatasetCommand(aRequest, moved, root, null)));
+                () -> testEngine.submit(new MoveDatasetCommand(aRequest, moved, root, null)));
     }
-    
-    	/**
-	 * Moving published  DS to unpublished DV
-        * @throws IllegalCommandException
-	 */
+
+    /**
+     * Moving published DS to unpublished DV
+     *
+     * @throws IllegalCommandException
+     */
     @Test
     void testInvalidMovePublishedToUnpublished() {
         DataverseRequest aRequest = new DataverseRequest(auth, httpRequest);
         assertThrows(IllegalCommandException.class,
-            () -> testEngine.submit(new MoveDatasetCommand(aRequest, moved, childDraft, null)));
+                () -> testEngine.submit(new MoveDatasetCommand(aRequest, moved, childDraft, null)));
     }
-         
-        
-        private static class EntityManagerImpl implements EntityManager {
+
+    private static class EntityManagerImpl implements EntityManager {
 
         @Override
         public void persist(Object entity) {
@@ -591,8 +686,8 @@ public <T> List<EntityGraph<? super T>> getEntityGraphs(Class<T> entityClass) {
             throw new UnsupportedOperationException("Not supported yet."); //To change body of generated methods, choose Tools | Templates.
         }
 
-    }    
-        
+    }
+
     private static class MockEntityManager extends EntityManagerImpl {
 
         @Override
diff --git a/src/test/java/edu/harvard/iq/dataverse/metrics/MetricsUtilTest.java b/src/test/java/edu/harvard/iq/dataverse/metrics/MetricsUtilTest.java
index 484ce2ebe47..ca662409a98 100644
--- a/src/test/java/edu/harvard/iq/dataverse/metrics/MetricsUtilTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/metrics/MetricsUtilTest.java
@@ -6,38 +6,36 @@
 import java.time.format.DateTimeFormatter;
 import java.util.ArrayList;
 import java.util.List;
-import java.util.Arrays;
-import java.util.Collection;
 import jakarta.json.Json;
 import jakarta.json.JsonArray;
 import jakarta.json.JsonArrayBuilder;
 import jakarta.json.JsonObject;
 
+import org.junit.jupiter.api.Nested;
 import org.junit.jupiter.api.Test;
 import org.junit.jupiter.params.ParameterizedTest;
 import org.junit.jupiter.params.provider.CsvSource;
 
 import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertNull;
 import static org.junit.jupiter.api.Assertions.assertThrows;
-import static org.junit.jupiter.api.Assertions.fail;
 
-public class MetricsUtilTest {
+class MetricsUtilTest {
 
-    public static class MetricsUtilNoParamTest {
+    @Nested
+    class MetricsUtilNoParamTest {
 
         private static final long COUNT = 42l;
 
         @Test
-        public void testCountToJson() {
-            // This constructor is just here for code coverage. :)
-            MetricsUtil metricsUtil = new MetricsUtil();
+        void testCountToJson() {
             JsonObject jsonObject = MetricsUtil.countToJson(COUNT).build();
             System.out.println(JsonUtil.prettyPrint(jsonObject));
             assertEquals(COUNT, jsonObject.getJsonNumber("count").longValue());
         }
 
         @Test
-        public void testDataversesByCategoryToJson() {
+        void testDataversesByCategoryToJson() {
             List<Object[]> list = new ArrayList<>();
             Object[] obj00 = { "RESEARCH_PROJECTS", 791l };
             Object[] obj01 = { "RESEARCHERS", 745l };
@@ -66,7 +64,7 @@ public void testDataversesByCategoryToJson() {
         }
 
         @Test
-        public void testDatasetsBySubjectToJson() {
+        void testDatasetsBySubjectToJson() {
             List<Object[]> list = new ArrayList<>();
             Object[] obj00 = { "Social Sciences", 24955l };
             Object[] obj01 = { "Medicine, Health and Life Sciences", 2262l };
@@ -105,7 +103,7 @@ public void testDatasetsBySubjectToJson() {
         }
 
         @Test
-        public void testDataversesBySubjectToJson() {
+        void testDataversesBySubjectToJson() {
             List<Object[]> list = new ArrayList<>();
             Object[] obj00 = { "Social Sciences", 24955l };
             Object[] obj01 = { "Medicine, Health and Life Sciences", 2262l };
@@ -164,7 +162,7 @@ void testSanitizeYearMonthUserInputIsAfterCurrentDate() {
         }
 
         @Test
-        public void testGetCurrentMonth() {
+        void testGetCurrentMonth() {
             String expectedMonth = LocalDate.now().format(DateTimeFormatter.ofPattern("yyyy-MM"));
             String currentMonth = MetricsUtil.getCurrentMonth();
             assertEquals(expectedMonth, currentMonth);
@@ -173,7 +171,7 @@ public void testGetCurrentMonth() {
         // Create JsonArray, turn into string and back into array to confirm data
         // integrity
         @Test
-        public void testStringToJsonArrayBuilder() {
+        void testStringToJsonArrayBuilder() {
             System.out.println("testStringToJsonArrayBuilder");
             List<Object[]> list = new ArrayList<>();
             Object[] obj00 = { "Social Sciences", 24955l };
@@ -192,7 +190,7 @@ public void testStringToJsonArrayBuilder() {
         // Create JsonObject, turn into string and back into array to confirm data
         // integrity
         @Test
-        public void testStringToJsonObjectBuilder() {
+        void testStringToJsonObjectBuilder() {
             System.out.println("testStringToJsonObjectBuilder");
 
             JsonObject jsonObjBefore = Json.createObjectBuilder().add("Test", "result").build();
@@ -204,6 +202,12 @@ public void testStringToJsonObjectBuilder() {
             assertEquals(jsonObjBefore.getString("Test"), jsonObjAfter.getString("Test"));
         }
 
+        @Test
+        void testStringToJsonWithNull() {
+            assertNull(MetricsUtil.stringToJsonArray(null));
+            assertNull(MetricsUtil.stringToJsonObject(null));
+        }
+
     }
     
     @ParameterizedTest
diff --git a/src/test/java/edu/harvard/iq/dataverse/mocks/MocksFactory.java b/src/test/java/edu/harvard/iq/dataverse/mocks/MocksFactory.java
index 927d288d660..9bda917a9bc 100644
--- a/src/test/java/edu/harvard/iq/dataverse/mocks/MocksFactory.java
+++ b/src/test/java/edu/harvard/iq/dataverse/mocks/MocksFactory.java
@@ -196,12 +196,17 @@ public static DatasetFieldType makeDatasetFieldType() {
         DatasetFieldType retVal = new DatasetFieldType("SampleType-"+id, FieldType.TEXT, false);
         retVal.setId(id);
         MetadataBlock mdb = new MetadataBlock();
+        mdb.setId(new Random().nextLong());
         mdb.setName("Test");
         retVal.setMetadataBlock(mdb);
         return retVal;
     }
     
-    public static DataverseRole makeRole( String name ) {
+    public static DataverseRole makeRole( String name ) {
+        return makeRole(name, true);
+    }
+    
+    public static DataverseRole makeRole( String name, Boolean includePublishDataset ) {
         DataverseRole dvr = new DataverseRole();
         
         dvr.setId( nextId() );
@@ -211,7 +216,10 @@ public static DataverseRole makeRole( String name ) {
         
         dvr.addPermission(Permission.ManageDatasetPermissions);
         dvr.addPermission(Permission.EditDataset);
-        dvr.addPermission(Permission.PublishDataset);
+        if (includePublishDataset) {
+            dvr.addPermission(Permission.PublishDataset);
+        }
+
         dvr.addPermission(Permission.ViewUnpublishedDataset);
         
         return dvr;
diff --git a/src/test/java/edu/harvard/iq/dataverse/search/SolrSearchResultTest.java b/src/test/java/edu/harvard/iq/dataverse/search/SolrSearchResultTest.java
index 4fb29869db7..d7deaa2dbc1 100644
--- a/src/test/java/edu/harvard/iq/dataverse/search/SolrSearchResultTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/search/SolrSearchResultTest.java
@@ -225,6 +225,29 @@ public void testSetPublicationStatuses14() {
         assertTrue(this.solrSearchResult.isDeaccessionedState());
     }
 
+    @Test
+    public void testSetPublicationStatusesJson() {
+
+        boolean showRelevance = false;
+        boolean showEntityIds = false;
+        boolean showApiUrls = false;
+
+        SolrSearchResult result01 = new SolrSearchResult("myQuery", "myName");
+        result01.setType(SearchConstants.DATAVERSES);
+        result01.setPublicationStatuses(List.of("Unpublished", "Draft"));
+        JsonObjectBuilder actual01 = result01.json(showRelevance, showEntityIds, showApiUrls);
+        JsonObject actual = actual01.build();
+        System.out.println("actual: " + actual);
+
+        JsonObjectBuilder expResult = Json.createObjectBuilder();
+        expResult.add("type", SearchConstants.DATAVERSE);
+        expResult.add("publicationStatuses", Json.createArrayBuilder().add("Unpublished").add("Draft").build());
+        JsonObject expected = expResult.build();
+        System.out.println("expect: " + expected);
+
+        assertEquals(expected, actual);
+    }
+
     @Test
     public void testJson() {
 
diff --git a/src/test/java/edu/harvard/iq/dataverse/validation/JSONDataValidationTest.java b/src/test/java/edu/harvard/iq/dataverse/validation/JSONDataValidationTest.java
new file mode 100644
index 00000000000..107dcecba35
--- /dev/null
+++ b/src/test/java/edu/harvard/iq/dataverse/validation/JSONDataValidationTest.java
@@ -0,0 +1,237 @@
+package edu.harvard.iq.dataverse.validation;
+
+import edu.harvard.iq.dataverse.ControlledVocabularyValue;
+import edu.harvard.iq.dataverse.DatasetFieldServiceBean;
+import edu.harvard.iq.dataverse.DatasetFieldType;
+import org.everit.json.schema.Schema;
+import org.everit.json.schema.ValidationException;
+import org.everit.json.schema.loader.SchemaLoader;
+import org.json.JSONObject;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.Test;
+import org.mockito.Mock;
+import org.mockito.Mockito;
+
+import java.io.IOException;
+import java.lang.reflect.Field;
+import java.nio.charset.StandardCharsets;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+import static org.junit.jupiter.api.Assertions.fail;
+import static org.mockito.ArgumentMatchers.any;
+
+public class JSONDataValidationTest {
+
+    @Mock
+    static DatasetFieldServiceBean datasetFieldServiceMock;
+    @Mock
+    static DatasetFieldType datasetFieldTypeMock;
+    static ControlledVocabularyValue cvv = new ControlledVocabularyValue();
+    static Map<String, Map<String, List<String>>> schemaChildMap = new HashMap<>();
+
+    static JSONObject rawSchema = null;
+    static JSONObject rawSchema() throws IOException {
+        if (rawSchema == null) {
+            Path file = Path.of("doc/sphinx-guides/source/_static/api/dataset-schema.json");
+            String schema = Files.readString(file, StandardCharsets.UTF_8);
+            rawSchema = new JSONObject(schema);
+        }
+        return rawSchema;
+    }
+
+    static String jsonInput() {
+        return """
+                   {
+                   "datasetVersion": {
+                       "license": {
+                         "name": "CC0 1.0",
+                         "uri": "http://creativecommons.org/publicdomain/zero/1.0"
+                       },
+                       "metadataBlocks": {
+                         "citation": {
+                           "fields": [
+                             {
+                               "value": "Darwin's Finches",
+                               "typeClass": "primitive",
+                               "multiple": false,
+                               "typeName": "title"
+                             },
+                             {
+                               "value": [
+                                 {
+                                   "authorName": {
+                                     "value": "Finch, Fiona",
+                                     "typeClass": "primitive",
+                                     "multiple": false,
+                                     "typeName": "authorName"
+                                   },
+                                   "authorAffiliation": {
+                                     "value": "Birds Inc.",
+                                     "typeClass": "primitive",
+                                     "multiple": false,
+                                     "typeName": "authorAffiliation"
+                                   }
+                                 }
+                               ],
+                               "typeClass": "compound",
+                               "multiple": true,
+                               "typeName": "author"
+                             },
+                             {
+                               "value": [
+                                   { "datasetContactEmail" : {
+                                       "typeClass": "primitive",
+                                       "multiple": false,
+                                       "typeName": "datasetContactEmail",
+                                       "value" : "finch@mailinator.com"
+                                   },
+                                   "datasetContactName" : {
+                                       "typeClass": "primitive",
+                                       "multiple": false,
+                                       "typeName": "datasetContactName",
+                                       "value": "Finch, Fiona"
+                                   }
+                               }],
+                               "typeClass": "compound",
+                               "multiple": true,
+                               "typeName": "datasetContact"
+                             },
+                             {
+                               "value": [{
+                                  "dsDescriptionValue":{
+                                    "value":   "Darwin's finches (also known as the Galápagos finches) are a group of about fifteen species of passerine birds.",
+                                    "multiple": false,
+                                    "typeClass": "primitive",
+                                    "typeName": "dsDescriptionValue"
+                                  },
+                                  "dsDescriptionDate": {
+                                     "typeName": "dsDescriptionDate",
+                                     "multiple": false,
+                                     "typeClass": "primitive",
+                                     "value": "2021-07-13"
+                                   }
+                                }],
+                               "typeClass": "compound",
+                               "multiple": true,
+                               "typeName": "dsDescription"
+                              },
+                             {
+                               "value": [
+                                 "Medicine, Health and Life Sciences",
+                                 "Social Sciences"
+                               ],
+                               "typeClass": "controlledVocabulary",
+                               "multiple": true,
+                               "typeName": "subject"
+                             }
+                           ],
+                           "displayName": "Citation Metadata"
+                         }
+                       }
+                     }
+                   }
+                """;
+    }
+
+    @BeforeAll
+    static void setup() throws NoSuchFieldException, IllegalAccessException {
+        datasetFieldServiceMock = Mockito.mock(DatasetFieldServiceBean.class);
+        datasetFieldTypeMock = Mockito.mock(DatasetFieldType.class);
+        Field datasetFieldServiceField = JSONDataValidation.class.getDeclaredField("datasetFieldService");
+        datasetFieldServiceField.setAccessible(true);
+        datasetFieldServiceField.set(JSONDataValidation.class, datasetFieldServiceMock);
+
+        Mockito.when(datasetFieldServiceMock.findByName(any(String.class))).thenReturn(datasetFieldTypeMock);
+        List<String> cvvList = List.of("Medicine, Health and Life Sciences", "Social Sciences");
+        cvvList.forEach(i -> {
+            Mockito.when(datasetFieldServiceMock.findControlledVocabularyValueByDatasetFieldTypeAndStrValue(datasetFieldTypeMock, i,true)).thenReturn(cvv);
+        });
+        Mockito.when(datasetFieldServiceMock.findControlledVocabularyValueByDatasetFieldTypeAndStrValue(datasetFieldTypeMock, "Bad",true)).thenReturn(null);
+
+        Map<String, List<String>> datasetContact = new HashMap<>();
+        datasetContact.put("required", List.of("datasetContactName"));
+        datasetContact.put("allowed", List.of("datasetContactName", "datasetContactEmail","datasetContactAffiliation"));
+        schemaChildMap.put("datasetContact",datasetContact);
+        Map<String, List<String>> dsDescription = new HashMap<>();
+        dsDescription.put("required", List.of("dsDescriptionValue"));
+        dsDescription.put("allowed", List.of("dsDescriptionValue", "dsDescriptionDate"));
+        schemaChildMap.put("dsDescription",dsDescription);
+
+    }
+    @Test
+    public void testSchema() throws IOException {
+        Schema schema = SchemaLoader.load(rawSchema());
+        schema.validate(new JSONObject(jsonInput()));
+    }
+    @Test
+    public void testValid() throws IOException {
+        Schema schema = SchemaLoader.load(rawSchema());
+        JSONDataValidation.validate(schema, schemaChildMap, jsonInput());
+    }
+    @Test
+    public void testInvalid() throws IOException {
+        Schema schema = SchemaLoader.load(rawSchema());
+        try {
+            JSONDataValidation.validate(schema, schemaChildMap, jsonInput().replace("\"Social Sciences\"", "\"Social Sciences\",\"Bad\""));
+            fail();
+        } catch (ValidationException e) {
+            System.out.println(e.getMessage());
+            e.printStackTrace();
+        }
+
+        try {
+            // test multiple = false but value is list
+            JSONDataValidation.validate(schema, schemaChildMap, jsonInput().replaceAll("true", "false"));
+            fail();
+        } catch (ValidationException e) {
+            System.out.println(e.getMessage());
+        }
+
+        // verify that child objects are also validated
+        String childTest = "\"multiple\": false, \"typeName\": \"authorAffiliation\"";
+        try {
+            String trimmedStr = jsonInput().replaceAll("\\s{2,}", " ");
+            // test child object with multiple set to true
+            JSONDataValidation.validate(schema, schemaChildMap, trimmedStr.replace(childTest, childTest.replace("false", "true")));
+            fail();
+        } catch (ValidationException e) {
+            System.out.println(e.getMessage());
+        }
+
+        try {
+            // test dsDescription but dsDescriptionValue missing
+            JSONDataValidation.validate(schema, schemaChildMap, jsonInput().replace("typeName\": \"dsDescriptionValue", "typeName\": \"notdsDescriptionValue"));
+            fail();
+        } catch (ValidationException e) {
+            System.out.println(e.getMessage());
+        }
+
+        try {
+            // test dsDescription but child dsDescriptionValue missing
+            JSONDataValidation.validate(schema, schemaChildMap, jsonInput().replace("dsDescriptionValue\":{", "notdsDescriptionValue\":{"));
+            fail();
+        } catch (ValidationException e) {
+            System.out.println(e.getMessage());
+        }
+
+        try {
+            // test required dataType missing
+            JSONDataValidation.validate(schema, schemaChildMap, jsonInput().replaceAll("\"datasetContactName\"", "\"datasetContactAffiliation\""));
+            fail();
+        } catch (ValidationException e) {
+            System.out.println(e.getMessage());
+        }
+
+        try {
+            // test dataType not allowed
+            JSONDataValidation.validate(schema, schemaChildMap, jsonInput().replaceAll("\"datasetContactEmail\"", "\"datasetContactNotAllowed\""));
+            fail();
+        } catch (ValidationException e) {
+            System.out.println(e.getMessage());
+        }
+    }
+}
diff --git a/tests/check_duplicate_properties.sh b/tests/check_duplicate_properties.sh
new file mode 100755
index 00000000000..7d053bdba4b
--- /dev/null
+++ b/tests/check_duplicate_properties.sh
@@ -0,0 +1,37 @@
+#!/bin/bash
+
+# This script checks the Java *.properties files within the src dir for duplicate keys
+# and prints logs with file annotations for any duplicates it finds.
+
+set -euo pipefail
+
+FAIL=0
+
+while IFS= read -r -d '' FILE; do
+
+    # Scan the whole file for duplicates
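+    # (strip comments and blank lines, take the key before "=", count occurrences, keep only keys seen more than once)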
+    FILTER=$(grep -a -v -E "^(#.*|\s*$)" "$FILE" | cut -d"=" -f1 | sort | uniq -c | tr -s " " | { grep -vs "^ 1 " || true; })
+
+    # If there are any duplicates present, analyse further to point people to the source
+    if [ -n "$FILTER" ]; then
+        FAIL=1
+
+        echo "::group::$FILE"
+        for KEY in $(echo "$FILTER" | cut -d" " -f3); do
+            # Find duplicate lines' numbers by grepping for the KEY and cutting the number from the output
+            DUPLICATE_LINES=$(grep -n -E -e "^$KEY=" "$FILE" | cut -d":" -f1)
+            # Join the found line numbers for a more readable error message
+            DUPLICATE_NUMBERS=$(echo "$DUPLICATE_LINES" | paste -sd ',')
+
+            # This form makes GitHub annotate the lines in the PR that changes the properties file
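+            # Emits e.g. "::error file=<file>,line=<n>::Found duplicate for key '<key>' in lines <n1>,<n2>"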
+            for LINE_NUMBER in $DUPLICATE_LINES; do
+                echo "::error file=$FILE,line=$LINE_NUMBER::Found duplicate for key '$KEY' in lines $DUPLICATE_NUMBERS"
+            done
+        done
+        echo "::endgroup::"
+    fi
+done < <( find "$(git rev-parse --show-toplevel)" -wholename "*/src/*.properties" -print0 )
+
+if [ "$FAIL" -eq 1 ]; then
+    exit 1
+fi
diff --git a/tests/integration-tests.txt b/tests/integration-tests.txt
index 44bbfdcceb7..fc3fc9b4a3f 100644
--- a/tests/integration-tests.txt
+++ b/tests/integration-tests.txt
@@ -1 +1 @@
-DataversesIT,DatasetsIT,SwordIT,AdminIT,BuiltinUsersIT,UsersIT,UtilIT,ConfirmEmailIT,FileMetadataIT,FilesIT,SearchIT,InReviewWorkflowIT,HarvestingServerIT,HarvestingClientsIT,MoveIT,MakeDataCountApiIT,FileTypeDetectionIT,EditDDIIT,ExternalToolsIT,AccessIT,DuplicateFilesIT,DownloadFilesIT,LinkIT,DeleteUsersIT,DeactivateUsersIT,AuxiliaryFilesIT,InvalidCharactersIT,LicensesIT,NotificationsIT,BagIT,MetadataBlocksIT,NetcdfIT,SignpostingIT,FitsIT,LogoutIT,DataRetrieverApiIT,ProvIT,S3AccessIT,OpenApiIT,InfoIT
+DataversesIT,DatasetsIT,SwordIT,AdminIT,BuiltinUsersIT,UsersIT,UtilIT,ConfirmEmailIT,FileMetadataIT,FilesIT,SearchIT,InReviewWorkflowIT,HarvestingServerIT,HarvestingClientsIT,MoveIT,MakeDataCountApiIT,FileTypeDetectionIT,EditDDIIT,ExternalToolsIT,AccessIT,DuplicateFilesIT,DownloadFilesIT,LinkIT,DeleteUsersIT,DeactivateUsersIT,AuxiliaryFilesIT,InvalidCharactersIT,LicensesIT,NotificationsIT,BagIT,MetadataBlocksIT,NetcdfIT,SignpostingIT,FitsIT,LogoutIT,DataRetrieverApiIT,ProvIT,S3AccessIT,OpenApiIT,InfoIT,DatasetFieldsIT,SavedSearchIT
diff --git a/tests/shell/spec/spec_helper.sh b/tests/shell/spec/spec_helper.sh
index 93f19083cd2..0cf2106f5e0 100644
--- a/tests/shell/spec/spec_helper.sh
+++ b/tests/shell/spec/spec_helper.sh
@@ -22,3 +22,4 @@ spec_helper_configure() {
   # Available functions: import, before_each, after_each, before_all, after_all
   : import 'support/custom_matcher'
 }
+
diff --git a/tests/verify_mdb_properties.sh b/tests/verify_mdb_properties.sh
new file mode 100755
index 00000000000..bc62c0f503a
--- /dev/null
+++ b/tests/verify_mdb_properties.sh
@@ -0,0 +1,99 @@
+#!/bin/bash
+
+# This script checks our metadata block TSV files and verifies that the matching properties files contain all the expected keys.
+
+set -euo pipefail
+
+if ! which jbang > /dev/null 2>&1; then
+  echo "Cannot find jbang on path. Did you install it?" >&2
+  exit 1
+fi
+if ! which native-image > /dev/null 2>&1; then
+  echo "Cannot find GraalVM native-image on path. Did you install it?" >&2
+  exit 1
+fi
+
+FAIL=0
+
+# We need a small Java app here that replaces spaces, converts to lower case and, most importantly, strips accents / replaces UTF-8 chars with their nearest ASCII equivalent, mirroring
+# https://github.com/IQSS/dataverse/blob/dddcf29188a5c35174f3c94ffc1c4cb1d7fc0552/src/main/java/edu/harvard/iq/dataverse/ControlledVocabularyValue.java#L139-L140
+# This cannot be replaced by another tool, as that Java code behaves in its own particular way.
+DIR=$(mktemp -d)
+SOURCE="$DIR/stripaccents.java"
+STRIP_BIN="$(dirname "$0")/stripaccents"
+cat > "$SOURCE" << EOF
+///usr/bin/env jbang "\$0" "\$@" ; exit \$?
+//JAVA 11+
+//DEPS org.apache.commons:commons-lang3:3.12.0
+import org.apache.commons.lang3.StringUtils;
+import java.nio.charset.StandardCharsets;
+import java.io.IOException;
+class stripaccents {
+    public static void main(String[] args) throws IOException {
+        String input = new String(System.in.readAllBytes(), StandardCharsets.UTF_8).toLowerCase().replace(" ", "_");
+        System.out.println(StringUtils.stripAccents(input));
+    }
+}
+EOF
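+# Compile the helper into a standalone native binary (via GraalVM native-image, hence the check above) so the loops below can invoke it cheaply for every value.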
+jbang export native --force --fresh -O "$STRIP_BIN" "$SOURCE"
+
+while IFS= read -r -d '' MDB; do
+
+    echo "::group::$MDB"
+    BLOCK_NAME=$(sed -n "2p" "$MDB" | cut -f2)
+    BLOCK_DISPLAY_NAME=$(sed -n "2p" "$MDB" | cut -f4)
+    PROPERTIES_FILE="$(git rev-parse --show-toplevel)/src/main/java/propertyFiles/$BLOCK_NAME.properties"
+
+    # Check correct file exists
+    if [ ! -r "$PROPERTIES_FILE" ]; then
+        echo "::error::Missing properties file for metadata block '$BLOCK_NAME', expected at '$PROPERTIES_FILE'"
+        FAIL=1
+        continue
+    fi
+
+    # Check metadata block properties exist and are equal to TSV source
+    if ! grep -a -q -e "^metadatablock.name=$BLOCK_NAME$" "$PROPERTIES_FILE"; then
+        echo "::error::Missing 'metadatablock.name=$BLOCK_NAME' or different from TSV source in $PROPERTIES_FILE"
+        FAIL=1
+    fi
+    if ! grep -a -q -e "^metadatablock.displayName=$BLOCK_DISPLAY_NAME$" "$PROPERTIES_FILE"; then
+        echo "::error::Missing 'metadatablock.displayName=$BLOCK_DISPLAY_NAME' or different from TSV source in $PROPERTIES_FILE"
+        FAIL=1
+    fi
+    if ! grep -a -q -e "^metadatablock.displayFacet=" "$PROPERTIES_FILE"; then
+        echo "::error::Missing 'metadatablock.displayFacet=...' in $PROPERTIES_FILE"
+        FAIL=1
+    fi
+
+    # Check dataset fields
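+    # Field names are column 2 of the rows between the #datasetField header and the #controlledVocabulary header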
+    for FIELD in $(grep -a -A1000 "^#datasetField" "$MDB" | tail -n+2 | grep -a -B1000 "^#controlledVocabulary" | head -n-1 | cut -f2); do
+        for ENTRY in title description watermark; do
+            if ! grep -a -q -e "^datasetfieldtype.$FIELD.$ENTRY=" "$PROPERTIES_FILE"; then
+                echo "::error::Missing key 'datasetfieldtype.$FIELD.$ENTRY=...' in $PROPERTIES_FILE"
+                FAIL=1
+            fi
+        done
+    done
+
+    # Check CV entries
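+    # Field 1 of each row is the dataset field name, field 2 the vocabulary value (normalized by the helper to match the property key format)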
+    while read -r LINE; do
+        FIELD_NAME=$(echo "$LINE" | cut -f1)
+        # See https://github.com/IQSS/dataverse/blob/dddcf29188a5c35174f3c94ffc1c4cb1d7fc0552/src/main/java/edu/harvard/iq/dataverse/ControlledVocabularyValue.java#L139-L140
+        # Square brackets are special characters in grep regular expressions, so escape them if present!
+        FIELD_VALUE=$(echo "$LINE" | cut -f2 | "$STRIP_BIN" | sed -e 's/\([][]\)/\\\1/g' )
+
+        if ! grep -q -a -e "^controlledvocabulary.$FIELD_NAME.$FIELD_VALUE=" "$PROPERTIES_FILE"; then
+            echo "::error::Missing key 'controlledvocabulary.$FIELD_NAME.$FIELD_VALUE=...' in $PROPERTIES_FILE"
+            FAIL=1
+        fi
+    done < <(grep -a -A1000 "^#controlledVocabulary" "$MDB" | tail -n+2)
+
+    echo "::endgroup::"
+
+done < <( find "$(git rev-parse --show-toplevel)/scripts/api/data/metadatablocks" -name '*.tsv' -print0 )
+
+rm "$SOURCE" "$STRIP_BIN"
+
+if [ "$FAIL" -eq 1 ]; then
+    exit 1
+fi