diff --git a/.github/workflows/docker.yml b/.github/workflows/docker.yml
index a9ff8760e763..9d32cb119d41 100644
--- a/.github/workflows/docker.yml
+++ b/.github/workflows/docker.yml
@@ -15,6 +15,7 @@ on:
 permissions:
   contents: read # to fetch code (actions/checkout)
+  packages: write # to write images to GitHub Container Registry (GHCR)
 
 jobs:
   ####################################################
@@ -147,4 +148,102 @@ jobs:
       tags_flavor: suffix=-loadsql
     secrets:
       DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }}
-      DOCKER_ACCESS_TOKEN: ${{ secrets.DOCKER_ACCESS_TOKEN }}
\ No newline at end of file
+      DOCKER_ACCESS_TOKEN: ${{ secrets.DOCKER_ACCESS_TOKEN }}
+
+  #################################################################################
+  # Test Deployment via Docker to ensure newly built images are working properly
+  #################################################################################
+  docker-deploy:
+    # Ensure this job never runs on forked repos. It's only executed for 'dspace/dspace'
+    if: github.repository == 'dspace/dspace'
+    runs-on: ubuntu-latest
+    # Must run after all major images are built
+    needs: [dspace, dspace-test, dspace-cli, dspace-postgres-pgcrypto, dspace-solr]
+    env:
+      # Override default dspace.server.url because backend starts at http://127.0.0.1:8080
+      dspace__P__server__P__url: http://127.0.0.1:8080/server
+      # Enable all optional modules / controllers for this test deployment.
+      # This helps check for errors in deploying these modules via Spring Boot
+      iiif__P__enabled: true
+      ldn__P__enabled: true
+      oai__P__enabled: true
+      rdf__P__enabled: true
+      signposting__P__enabled: true
+      sword__D__server__P__enabled: true
+      swordv2__D__server__P__enabled: true
+      # If this is a PR against main (default branch), use "latest".
+      # Else if this is a PR against a different branch, use the base branch name.
+      # Else if this is a commit on main (default branch), use the "latest" tag.
+      # Else, just use the branch name.
+      # NOTE: DSPACE_VER is used because our docker compose scripts default to using the "-test" image.
+      DSPACE_VER: ${{ (github.event_name == 'pull_request' && github.event.pull_request.base.ref == github.event.repository.default_branch && 'latest') || (github.event_name == 'pull_request' && github.event.pull_request.base.ref) || (github.ref_name == github.event.repository.default_branch && 'latest') || github.ref_name }}
+      # Docker Registry to use for Docker compose scripts below.
+      # We use GitHub's Container Registry to avoid aggressive rate limits at DockerHub.
+      DOCKER_REGISTRY: ghcr.io
+    steps:
+      # Checkout our codebase (to get access to Docker Compose scripts)
+      - name: Checkout codebase
+        uses: actions/checkout@v4
+      # Download Docker image artifacts (which were just built by reusable-docker-build.yml)
+      - name: Download Docker image artifacts
+        uses: actions/download-artifact@v4
+        with:
+          # Download all amd64 Docker images (TAR files) into the /tmp/docker directory
+          pattern: docker-image-*-linux-amd64
+          path: /tmp/docker
+          merge-multiple: true
+      # Load each of the images into Docker by calling "docker image load" for each.
+      # This ensures we are using the images just built & not any prior versions on DockerHub
+      - name: Load all downloaded Docker images
+        run: |
+          find /tmp/docker -type f -name "*.tar" -exec docker image load --input "{}" \;
+          docker image ls -a
+      # Start backend using our compose script in the codebase.
+      - name: Start backend in Docker
+        run: |
+          docker compose -f docker-compose.yml up -d
+          sleep 10
+          docker container ls
+      # Create a test admin account. Load test data from a simple set of AIPs as defined in cli.ingest.yml
+      - name: Load test data into Backend
+        run: |
+          docker compose -f docker-compose-cli.yml run --rm dspace-cli create-administrator -e test@test.edu -f admin -l user -p admin -c en
+          docker compose -f docker-compose-cli.yml -f dspace/src/main/docker-compose/cli.ingest.yml run --rm dspace-cli
+      # Verify backend started successfully.
+      # 1. Make sure root endpoint is responding (check for dspace.name defined in docker-compose.yml)
+      # 2. Also check /collections endpoint to ensure the test data loaded properly (check for a collection name in AIPs)
+      - name: Verify backend is responding properly
+        run: |
+          result=$(wget -O- -q http://127.0.0.1:8080/server/api)
+          echo "$result"
+          echo "$result" | grep -oE "\"DSpace Started with Docker Compose\","
+          result=$(wget -O- -q http://127.0.0.1:8080/server/api/core/collections)
+          echo "$result"
+          echo "$result" | grep -oE "\"Dog in Yard\","
+      # Verify Handle Server can be started and is working properly
+      # 1. First generate the "[dspace]/handle-server" folder with the sitebndl.zip
+      # 2. Start the Handle Server (and wait 20 seconds to let it start up)
+      # 3. Verify logs do NOT include "Exception" in the text (as that means an error occurred)
+      # 4. Check that Handle Proxy HTML page is responding on default port (8000)
+      - name: Verify Handle Server is working properly
+        run: |
+          docker exec -i dspace /dspace/bin/make-handle-config
+          echo "Starting Handle Server..."
+          docker exec -i dspace /dspace/bin/start-handle-server
+          sleep 20
+          echo "Checking for errors in error.log"
+          result=$(docker exec -i dspace sh -c "cat /dspace/handle-server/logs/error.log* || echo ''")
+          echo "$result"
+          echo "$result" | grep -vqz "Exception"
+          echo "Checking for errors in handle-server.log..."
+          result=$(docker exec -i dspace cat /dspace/log/handle-server.log)
+          echo "$result"
+          echo "$result" | grep -vqz "Exception"
+          echo "Checking to see if Handle Proxy webpage is available..."
+          result=$(wget -O- -q http://127.0.0.1:8000/)
+          echo "$result"
+          echo "$result" | grep -oE "Handle Proxy"
+      # Shutdown our containers
+      - name: Shutdown Docker containers
+        run: |
+          docker compose -f docker-compose.yml down
diff --git a/.github/workflows/reusable-docker-build.yml b/.github/workflows/reusable-docker-build.yml
index 7a8de661fa68..7a8abda3e106 100644
--- a/.github/workflows/reusable-docker-build.yml
+++ b/.github/workflows/reusable-docker-build.yml
@@ -54,10 +54,13 @@ env:
   # For a new commit on default branch (main), use the literal tag 'latest' on Docker image.
   # For a new commit on other branches, use the branch name as the tag for Docker image.
   # For a new tag, copy that tag name as the tag for Docker image.
+  # For a pull request, use the name of the base branch that the PR was created against or "latest" (for main).
+  # e.g. PR against 'main' will use "latest". A PR against 'dspace-7_x' will use 'dspace-7_x'.
IMAGE_TAGS: | type=raw,value=latest,enable=${{ github.ref_name == github.event.repository.default_branch }} type=ref,event=branch,enable=${{ github.ref_name != github.event.repository.default_branch }} type=ref,event=tag + type=raw,value=${{ (github.event.pull_request.base.ref == github.event.repository.default_branch && 'latest') || github.event.pull_request.base.ref }},enable=${{ github.event_name == 'pull_request' }} # Define default tag "flavor" for docker/metadata-action per # https://github.com/docker/metadata-action#flavor-input # We manage the 'latest' tag ourselves to the 'main' branch (see settings above) @@ -72,6 +75,9 @@ env: DEPLOY_DEMO_BRANCH: 'dspace-8_x' DEPLOY_SANDBOX_BRANCH: 'main' DEPLOY_ARCH: 'linux/amd64' + # Registry used during building of Docker images. (All images are later copied to docker.io registry) + # We use GitHub's Container Registry to avoid aggressive rate limits at DockerHub. + DOCKER_BUILD_REGISTRY: ghcr.io jobs: docker-build: @@ -96,6 +102,7 @@ jobs: # This step converts the slashes in the "arch" matrix values above into dashes & saves to env.ARCH_NAME # E.g. "linux/amd64" becomes "linux-amd64" # This is necessary because all upload artifacts CANNOT have special chars (like slashes) + # NOTE: The regex-like syntax below is Bash Parameter Substitution - name: Prepare run: | platform=${{ matrix.arch }} @@ -105,35 +112,45 @@ jobs: - name: Checkout codebase uses: actions/checkout@v4 - # https://github.com/docker/setup-buildx-action - - name: Setup Docker Buildx - uses: docker/setup-buildx-action@v3 + # https://github.com/docker/login-action + # NOTE: This login occurs for BOTH non-PRs or PRs. PRs *must* also login to access private images from GHCR + # during the build process + - name: Login to ${{ env.DOCKER_BUILD_REGISTRY }} + uses: docker/login-action@v3 + with: + registry: ${{ env.DOCKER_BUILD_REGISTRY }} + username: ${{ github.repository_owner }} + password: ${{ secrets.GITHUB_TOKEN }} # https://github.com/docker/setup-qemu-action - name: Set up QEMU emulation to build for multiple architectures uses: docker/setup-qemu-action@v3 - # https://github.com/docker/login-action - - name: Login to DockerHub - # Only login if not a PR, as PRs only trigger a Docker build and not a push - if: ${{ ! matrix.isPr }} - uses: docker/login-action@v3 - with: - username: ${{ secrets.DOCKER_USERNAME }} - password: ${{ secrets.DOCKER_ACCESS_TOKEN }} + # https://github.com/docker/setup-buildx-action + - name: Setup Docker Buildx + uses: docker/setup-buildx-action@v3 # https://github.com/docker/metadata-action - # Get Metadata for docker_build_deps step below - - name: Sync metadata (tags, labels) from GitHub to Docker for image + # Extract metadata used for Docker images in all build steps below + - name: Extract metadata (tags, labels) from GitHub for Docker image id: meta_build uses: docker/metadata-action@v5 with: - images: ${{ env.IMAGE_NAME }} + images: ${{ env.DOCKER_BUILD_REGISTRY }}/${{ env.IMAGE_NAME }} tags: ${{ env.IMAGE_TAGS }} flavor: ${{ env.TAGS_FLAVOR }} + #-------------------------------------------------------------------- + # First, for all branch commits (non-PRs) we build the image & upload + # to GitHub Container Registry (GHCR). After uploading the image + # to GHCR, we store the image digest in an artifact, so we can + # create a merged manifest later (see 'docker-build_manifest' job). + # + # NOTE: We use GHCR in order to avoid aggressive rate limits at DockerHub. 
+ #-------------------------------------------------------------------- # https://github.com/docker/build-push-action - - name: Build and push image + - name: Build and push image to ${{ env.DOCKER_BUILD_REGISTRY }} + if: ${{ ! matrix.isPr }} id: docker_build uses: docker/build-push-action@v5 with: @@ -141,15 +158,20 @@ jobs: ${{ inputs.dockerfile_additional_contexts }} context: ${{ inputs.dockerfile_context }} file: ${{ inputs.dockerfile_path }} + # Tell DSpace's Docker files to use the build registry instead of DockerHub + build-args: + DOCKER_REGISTRY=${{ env.DOCKER_BUILD_REGISTRY }} platforms: ${{ matrix.arch }} - # For pull requests, we run the Docker build (to ensure no PR changes break the build), - # but we ONLY do an image push to DockerHub if it's NOT a PR - push: ${{ ! matrix.isPr }} + push: true # Use tags / labels provided by 'docker/metadata-action' above tags: ${{ steps.meta_build.outputs.tags }} labels: ${{ steps.meta_build.outputs.labels }} + # Use GitHub cache to load cached Docker images and cache the results of this build + # This decreases the number of images we need to fetch from DockerHub + cache-from: type=gha,scope=${{ inputs.build_id }} + cache-to: type=gha,scope=${{ inputs.build_id }},mode=max - # Export the digest of Docker build locally (for non PRs only) + # Export the digest of Docker build locally - name: Export Docker build digest if: ${{ ! matrix.isPr }} run: | @@ -157,7 +179,8 @@ jobs: digest="${{ steps.docker_build.outputs.digest }}" touch "/tmp/digests/${digest#sha256:}" - # Upload digest to an artifact, so that it can be used in manifest below + # Upload digest to an artifact, so that it can be used in combined manifest below + # (The purpose of the combined manifest is to list both amd64 and arm64 builds under same tag) - name: Upload Docker build digest to artifact if: ${{ ! matrix.isPr }} uses: actions/upload-artifact@v4 @@ -167,33 +190,60 @@ jobs: if-no-files-found: error retention-days: 1 - # If this build is NOT a PR and passed in a REDEPLOY_SANDBOX_URL secret, - # Then redeploy https://sandbox.dspace.org if this build is for our deployment architecture and 'main' branch. - - name: Redeploy sandbox.dspace.org (based on main branch) - if: | - !matrix.isPR && - env.REDEPLOY_SANDBOX_URL != '' && - matrix.arch == env.DEPLOY_ARCH && - github.ref_name == env.DEPLOY_SANDBOX_BRANCH - run: | - curl -X POST $REDEPLOY_SANDBOX_URL + #------------------------------------------------------------------------------ + # Second, we build the image again in order to store it in a local TAR file. + # This TAR of the image is cached/saved as an artifact, so that it can be used + # by later jobs to install the brand-new images for automated testing. + # This TAR build is performed BOTH for PRs and for branch commits (non-PRs). + # + # (This approach has the advantage of avoiding having to download the newly built + # image from DockerHub or GHCR during automated testing.) + # + # See the 'docker-deploy' job in docker.yml as an example of where this TAR is used. + #------------------------------------------------------------------------------- + # Build local image (again) and store in a TAR file in /tmp directory + # This step is only done for AMD64, as that's the only image we use in our automated testing (at this time). + # NOTE: This step cannot be combined with the build above as it's a different type of output. 
+ - name: Build and push image to local TAR file + if: ${{ matrix.arch == 'linux/amd64'}} + uses: docker/build-push-action@v5 + with: + build-contexts: | + ${{ inputs.dockerfile_additional_contexts }} + context: ${{ inputs.dockerfile_context }} + file: ${{ inputs.dockerfile_path }} + # Tell DSpace's Docker files to use the build registry instead of DockerHub + build-args: + DOCKER_REGISTRY=${{ env.DOCKER_BUILD_REGISTRY }} + platforms: ${{ matrix.arch }} + tags: ${{ steps.meta_build.outputs.tags }} + labels: ${{ steps.meta_build.outputs.labels }} + # Use GitHub cache to load cached Docker images and cache the results of this build + # This decreases the number of images we need to fetch from DockerHub + cache-from: type=gha,scope=${{ inputs.build_id }} + cache-to: type=gha,scope=${{ inputs.build_id }},mode=max + # Export image to a local TAR file + outputs: type=docker,dest=/tmp/${{ inputs.build_id }}.tar - # If this build is NOT a PR and passed in a REDEPLOY_DEMO_URL secret, - # Then redeploy https://demo.dspace.org if this build is for our deployment architecture and demo branch. - - name: Redeploy demo.dspace.org (based on maintenance branch) - if: | - !matrix.isPR && - env.REDEPLOY_DEMO_URL != '' && - matrix.arch == env.DEPLOY_ARCH && - github.ref_name == env.DEPLOY_DEMO_BRANCH - run: | - curl -X POST $REDEPLOY_DEMO_URL + # Upload the local docker image (in TAR file) to a build Artifact + # This step is only done for AMD64, as that's the only image we use in our automated testing (at this time). + - name: Upload local image TAR to artifact + if: ${{ matrix.arch == 'linux/amd64'}} + uses: actions/upload-artifact@v4 + with: + name: docker-image-${{ inputs.build_id }}-${{ env.ARCH_NAME }} + path: /tmp/${{ inputs.build_id }}.tar + if-no-files-found: error + retention-days: 1 - # Merge Docker digests (from various architectures) into a manifest. - # This runs after all Docker builds complete above, and it tells hub.docker.com - # that these builds should be all included in the manifest for this tag. - # (e.g. AMD64 and ARM64 should be listed as options under the same tagged Docker image) + ########################################################################################## + # Merge Docker digests (from various architectures) into a single manifest. + # This runs after all Docker builds complete above. The purpose is to include all builds + # under a single manifest for this tag. + # (e.g. both linux/amd64 and linux/arm64 should be listed under the same tagged Docker image) + ########################################################################################## docker-build_manifest: + # Only run if this is NOT a PR if: ${{ github.event_name != 'pull_request' }} runs-on: ubuntu-latest needs: @@ -207,29 +257,102 @@ jobs: pattern: digests-${{ inputs.build_id }}-* merge-multiple: true + - name: Login to ${{ env.DOCKER_BUILD_REGISTRY }} + uses: docker/login-action@v3 + with: + registry: ${{ env.DOCKER_BUILD_REGISTRY }} + username: ${{ github.repository_owner }} + password: ${{ secrets.GITHUB_TOKEN }} + - name: Set up Docker Buildx uses: docker/setup-buildx-action@v3 - name: Add Docker metadata for image id: meta uses: docker/metadata-action@v5 + with: + images: ${{ env.DOCKER_BUILD_REGISTRY }}/${{ env.IMAGE_NAME }} + tags: ${{ env.IMAGE_TAGS }} + flavor: ${{ env.TAGS_FLAVOR }} + + - name: Create manifest list from digests and push to ${{ env.DOCKER_BUILD_REGISTRY }} + working-directory: /tmp/digests + run: | + docker buildx imagetools create $(jq -cr '.tags | map("-t " + .) 
| join(" ")' <<< "$DOCKER_METADATA_OUTPUT_JSON") \ + $(printf '${{ env.DOCKER_BUILD_REGISTRY }}/${{ env.IMAGE_NAME }}@sha256:%s ' *) + + - name: Inspect manifest in ${{ env.DOCKER_BUILD_REGISTRY }} + run: | + docker buildx imagetools inspect ${{ env.DOCKER_BUILD_REGISTRY }}/${{ env.IMAGE_NAME }}:${{ steps.meta.outputs.version }} + + ########################################################################################## + # Copy images / manifest to DockerHub. + # This MUST run after *both* images (AMD64 and ARM64) are built and uploaded to GitHub + # Container Registry (GHCR). Attempting to run this in parallel to GHCR builds can result + # in a race condition...i.e. the copy to DockerHub may fail if GHCR image has been updated + # at the moment when the copy occurs. + ########################################################################################## + docker-copy_to_dockerhub: + # Only run if this is NOT a PR + if: ${{ github.event_name != 'pull_request' }} + runs-on: ubuntu-latest + needs: + - docker-build_manifest + + steps: + # 'regctl' is used to more easily copy the image to DockerHub and obtain the digest from DockerHub + # See https://github.com/regclient/regclient/blob/main/docs/regctl.md + - name: Install regctl for Docker registry tools + uses: regclient/actions/regctl-installer@main + with: + release: 'v0.8.0' + + # This recreates Docker tags for DockerHub + - name: Add Docker metadata for image + id: meta_dockerhub + uses: docker/metadata-action@v5 with: images: ${{ env.IMAGE_NAME }} tags: ${{ env.IMAGE_TAGS }} flavor: ${{ env.TAGS_FLAVOR }} - - name: Login to Docker Hub + # Login to source registry first, as this is where we are copying *from* + - name: Login to ${{ env.DOCKER_BUILD_REGISTRY }} + uses: docker/login-action@v3 + with: + registry: ${{ env.DOCKER_BUILD_REGISTRY }} + username: ${{ github.repository_owner }} + password: ${{ secrets.GITHUB_TOKEN }} + + # Login to DockerHub, since this is where we are copying *to* + - name: Login to DockerHub uses: docker/login-action@v3 with: username: ${{ secrets.DOCKER_USERNAME }} password: ${{ secrets.DOCKER_ACCESS_TOKEN }} - - name: Create manifest list from digests and push - working-directory: /tmp/digests + # Copy the image from source to DockerHub + - name: Copy image from ${{ env.DOCKER_BUILD_REGISTRY }} to docker.io run: | - docker buildx imagetools create $(jq -cr '.tags | map("-t " + .) | join(" ")' <<< "$DOCKER_METADATA_OUTPUT_JSON") \ - $(printf '${{ env.IMAGE_NAME }}@sha256:%s ' *) + regctl image copy ${{ env.DOCKER_BUILD_REGISTRY }}/${{ env.IMAGE_NAME }}:${{ steps.meta_dockerhub.outputs.version }} docker.io/${{ env.IMAGE_NAME }}:${{ steps.meta_dockerhub.outputs.version }} - - name: Inspect image + #-------------------------------------------------------------------- + # Finally, check whether demo.dspace.org or sandbox.dspace.org need + # to be redeployed based on these new DockerHub images. 
+ #-------------------------------------------------------------------- + # If this build is for the branch that Sandbox uses and passed in a REDEPLOY_SANDBOX_URL secret, + # Then redeploy https://sandbox.dspace.org + - name: Redeploy sandbox.dspace.org (based on main branch) + if: | + env.REDEPLOY_SANDBOX_URL != '' && + github.ref_name == env.DEPLOY_SANDBOX_BRANCH + run: | + curl -X POST $REDEPLOY_SANDBOX_URL + # If this build is for the branch that Demo uses and passed in a REDEPLOY_DEMO_URL secret, + # Then redeploy https://demo.dspace.org + - name: Redeploy demo.dspace.org (based on maintenance branch) + if: | + env.REDEPLOY_DEMO_URL != '' && + github.ref_name == env.DEPLOY_DEMO_BRANCH run: | - docker buildx imagetools inspect ${{ env.IMAGE_NAME }}:${{ steps.meta.outputs.version }} + curl -X POST $REDEPLOY_DEMO_URL \ No newline at end of file diff --git a/Dockerfile b/Dockerfile index 75817980379c..5aece8b7d37e 100644 --- a/Dockerfile +++ b/Dockerfile @@ -6,10 +6,14 @@ # This Dockerfile uses JDK17 by default. # To build with other versions, use "--build-arg JDK_VERSION=[value]" ARG JDK_VERSION=17 +# The Docker version tag to build from ARG DSPACE_VERSION=latest +# The Docker registry to use for DSpace images. Defaults to "docker.io" +# NOTE: non-DSpace images are hardcoded to use "docker.io" and are not impacted by this build argument +ARG DOCKER_REGISTRY=docker.io # Step 1 - Run Maven Build -FROM dspace/dspace-dependencies:${DSPACE_VERSION} AS build +FROM ${DOCKER_REGISTRY}/dspace/dspace-dependencies:${DSPACE_VERSION} AS build ARG TARGET_DIR=dspace-installer WORKDIR /app # The dspace-installer directory will be written to /install @@ -31,35 +35,38 @@ RUN mvn --no-transfer-progress package ${MAVEN_FLAGS} && \ RUN rm -rf /install/webapps/server/ # Step 2 - Run Ant Deploy -FROM eclipse-temurin:${JDK_VERSION} AS ant_build +FROM docker.io/eclipse-temurin:${JDK_VERSION} AS ant_build ARG TARGET_DIR=dspace-installer # COPY the /install directory from 'build' container to /dspace-src in this container COPY --from=build /install /dspace-src WORKDIR /dspace-src # Create the initial install deployment using ANT -ENV ANT_VERSION 1.10.13 -ENV ANT_HOME /tmp/ant-$ANT_VERSION -ENV PATH $ANT_HOME/bin:$PATH -# Need wget to install ant -RUN apt-get update \ - && apt-get install -y --no-install-recommends wget \ - && apt-get purge -y --auto-remove \ - && rm -rf /var/lib/apt/lists/* +ENV ANT_VERSION=1.10.13 +ENV ANT_HOME=/tmp/ant-$ANT_VERSION +ENV PATH=$ANT_HOME/bin:$PATH # Download and install 'ant' RUN mkdir $ANT_HOME && \ - wget -qO- "https://archive.apache.org/dist/ant/binaries/apache-ant-$ANT_VERSION-bin.tar.gz" | tar -zx --strip-components=1 -C $ANT_HOME + curl --silent --show-error --location --fail --retry 5 --output /tmp/apache-ant.tar.gz \ + https://archive.apache.org/dist/ant/binaries/apache-ant-${ANT_VERSION}-bin.tar.gz && \ + tar -zx --strip-components=1 -f /tmp/apache-ant.tar.gz -C $ANT_HOME && \ + rm /tmp/apache-ant.tar.gz # Run necessary 'ant' deploy scripts RUN ant init_installation update_configs update_code update_webapps # Step 3 - Start up DSpace via Runnable JAR -FROM eclipse-temurin:${JDK_VERSION} +FROM docker.io/eclipse-temurin:${JDK_VERSION} # NOTE: DSPACE_INSTALL must align with the "dspace.dir" default configuration. 
ENV DSPACE_INSTALL=/dspace # Copy the /dspace directory from 'ant_build' container to /dspace in this container COPY --from=ant_build /dspace $DSPACE_INSTALL WORKDIR $DSPACE_INSTALL -# Expose Tomcat port -EXPOSE 8080 +# Need host command for "[dspace]/bin/make-handle-config" +RUN apt-get update \ + && apt-get install -y --no-install-recommends host \ + && apt-get purge -y --auto-remove \ + && rm -rf /var/lib/apt/lists/* +# Expose Tomcat port (8080) & Handle Server HTTP port (8000) +EXPOSE 8080 8000 # Give java extra memory (2GB) ENV JAVA_OPTS=-Xmx2000m # On startup, run DSpace Runnable JAR diff --git a/Dockerfile.cli b/Dockerfile.cli index 5254d1eb4d69..e43c8eb95dd6 100644 --- a/Dockerfile.cli +++ b/Dockerfile.cli @@ -6,10 +6,14 @@ # This Dockerfile uses JDK17 by default. # To build with other versions, use "--build-arg JDK_VERSION=[value]" ARG JDK_VERSION=17 +# The Docker version tag to build from ARG DSPACE_VERSION=latest +# The Docker registry to use for DSpace images. Defaults to "docker.io" +# NOTE: non-DSpace images are hardcoded to use "docker.io" and are not impacted by this build argument +ARG DOCKER_REGISTRY=docker.io # Step 1 - Run Maven Build -FROM dspace/dspace-dependencies:${DSPACE_VERSION} AS build +FROM ${DOCKER_REGISTRY}/dspace/dspace-dependencies:${DSPACE_VERSION} AS build ARG TARGET_DIR=dspace-installer WORKDIR /app # The dspace-installer directory will be written to /install @@ -25,28 +29,26 @@ RUN mvn --no-transfer-progress package && \ mvn clean # Step 2 - Run Ant Deploy -FROM eclipse-temurin:${JDK_VERSION} AS ant_build +FROM docker.io/eclipse-temurin:${JDK_VERSION} AS ant_build ARG TARGET_DIR=dspace-installer # COPY the /install directory from 'build' container to /dspace-src in this container COPY --from=build /install /dspace-src WORKDIR /dspace-src # Create the initial install deployment using ANT -ENV ANT_VERSION 1.10.13 -ENV ANT_HOME /tmp/ant-$ANT_VERSION -ENV PATH $ANT_HOME/bin:$PATH -# Need wget to install ant -RUN apt-get update \ - && apt-get install -y --no-install-recommends wget \ - && apt-get purge -y --auto-remove \ - && rm -rf /var/lib/apt/lists/* +ENV ANT_VERSION=1.10.13 +ENV ANT_HOME=/tmp/ant-$ANT_VERSION +ENV PATH=$ANT_HOME/bin:$PATH # Download and install 'ant' RUN mkdir $ANT_HOME && \ - wget -qO- "https://archive.apache.org/dist/ant/binaries/apache-ant-$ANT_VERSION-bin.tar.gz" | tar -zx --strip-components=1 -C $ANT_HOME + curl --silent --show-error --location --fail --retry 5 --output /tmp/apache-ant.tar.gz \ + https://archive.apache.org/dist/ant/binaries/apache-ant-${ANT_VERSION}-bin.tar.gz && \ + tar -zx --strip-components=1 -f /tmp/apache-ant.tar.gz -C $ANT_HOME && \ + rm /tmp/apache-ant.tar.gz # Run necessary 'ant' deploy scripts RUN ant init_installation update_configs update_code # Step 3 - Run jdk -FROM eclipse-temurin:${JDK_VERSION} +FROM docker.io/eclipse-temurin:${JDK_VERSION} # NOTE: DSPACE_INSTALL must align with the "dspace.dir" default configuration. 
ENV DSPACE_INSTALL=/dspace # Copy the /dspace directory from 'ant_build' container to /dspace in this container diff --git a/Dockerfile.dependencies b/Dockerfile.dependencies index f3bf1f833205..04233cd415fa 100644 --- a/Dockerfile.dependencies +++ b/Dockerfile.dependencies @@ -6,8 +6,8 @@ # To build with other versions, use "--build-arg JDK_VERSION=[value]" ARG JDK_VERSION=17 -# Step 1 - Run Maven Build -FROM maven:3-eclipse-temurin-${JDK_VERSION} AS build +# Step 1 - Download all Dependencies +FROM docker.io/maven:3-eclipse-temurin-${JDK_VERSION} AS build ARG TARGET_DIR=dspace-installer WORKDIR /app # Create the 'dspace' user account & home directory @@ -19,16 +19,60 @@ RUN chown -Rv dspace: /app # Switch to dspace user & run below commands as that user USER dspace -# Copy the DSpace source code (from local machine) into the workdir (excluding .dockerignore contents) -ADD --chown=dspace . /app/ +# This next part may look odd, but it speeds up the build of this image *significantly*. +# Copy ONLY the POMs to this image (from local machine). This will allow us to download all dependencies *without* +# performing any code compilation steps. + +# Parent POM +ADD --chown=dspace pom.xml /app/ +RUN mkdir -p /app/dspace + +# 'dspace' module POM. Includes 'additions' ONLY, as it's the only submodule that is required to exist. +ADD --chown=dspace dspace/pom.xml /app/dspace/ +RUN mkdir -p /app/dspace/modules/ +ADD --chown=dspace dspace/modules/pom.xml /app/dspace/modules/ +RUN mkdir -p /app/dspace/modules/additions +ADD --chown=dspace dspace/modules/additions/pom.xml /app/dspace/modules/additions/ + +# 'dspace-api' module POM +RUN mkdir -p /app/dspace-api +ADD --chown=dspace dspace-api/pom.xml /app/dspace-api/ + +# 'dspace-iiif' module POM +RUN mkdir -p /app/dspace-iiif +ADD --chown=dspace dspace-iiif/pom.xml /app/dspace-iiif/ + +# 'dspace-oai' module POM +RUN mkdir -p /app/dspace-oai +ADD --chown=dspace dspace-oai/pom.xml /app/dspace-oai/ + +# 'dspace-rdf' module POM +RUN mkdir -p /app/dspace-rdf +ADD --chown=dspace dspace-rdf/pom.xml /app/dspace-rdf/ + +# 'dspace-server-webapp' module POM +RUN mkdir -p /app/dspace-server-webapp +ADD --chown=dspace dspace-server-webapp/pom.xml /app/dspace-server-webapp/ + +# 'dspace-services' module POM +RUN mkdir -p /app/dspace-services +ADD --chown=dspace dspace-services/pom.xml /app/dspace-services/ + +# 'dspace-sword' module POM +RUN mkdir -p /app/dspace-sword +ADD --chown=dspace dspace-sword/pom.xml /app/dspace-sword/ + +# 'dspace-swordv2' module POM +RUN mkdir -p /app/dspace-swordv2 +ADD --chown=dspace dspace-swordv2/pom.xml /app/dspace-swordv2/ # Trigger the installation of all maven dependencies (hide download progress messages) # Maven flags here ensure that we skip final assembly, skip building test environment and skip all code verification checks. -# These flags speed up this installation as much as reasonably possible. -ENV MAVEN_FLAGS="-P-assembly -P-test-environment -Denforcer.skip=true -Dcheckstyle.skip=true -Dlicense.skip=true -Dxml.skip=true" -RUN mvn --no-transfer-progress install ${MAVEN_FLAGS} +# These flags speed up this installation and skip tasks we cannot perform as we don't have the full source code. +ENV MAVEN_FLAGS="-P-assembly -P-test-environment -Denforcer.skip=true -Dcheckstyle.skip=true -Dlicense.skip=true -Dxjc.skip=true -Dxml.skip=true" +RUN mvn --no-transfer-progress verify ${MAVEN_FLAGS} -# Clear the contents of the /app directory (including all maven builds), so no artifacts remain. 
+# Clear the contents of the /app directory (including all maven target folders), so no artifacts remain. # This ensures when dspace:dspace is built, it will use the Maven local cache (~/.m2) for dependencies USER root RUN rm -rf /app/* diff --git a/Dockerfile.test b/Dockerfile.test index f3acef00e825..90266101dbf0 100644 --- a/Dockerfile.test +++ b/Dockerfile.test @@ -8,10 +8,14 @@ # This Dockerfile uses JDK17 by default. # To build with other versions, use "--build-arg JDK_VERSION=[value]" ARG JDK_VERSION=17 +# The Docker version tag to build from ARG DSPACE_VERSION=latest +# The Docker registry to use for DSpace images. Defaults to "docker.io" +# NOTE: non-DSpace images are hardcoded to use "docker.io" and are not impacted by this build argument +ARG DOCKER_REGISTRY=docker.io # Step 1 - Run Maven Build -FROM dspace/dspace-dependencies:${DSPACE_VERSION} AS build +FROM ${DOCKER_REGISTRY}/dspace/dspace-dependencies:${DSPACE_VERSION} AS build ARG TARGET_DIR=dspace-installer WORKDIR /app # The dspace-installer directory will be written to /install @@ -30,33 +34,36 @@ RUN mvn --no-transfer-progress package && \ RUN rm -rf /install/webapps/server/ # Step 2 - Run Ant Deploy -FROM eclipse-temurin:${JDK_VERSION} AS ant_build +FROM docker.io/eclipse-temurin:${JDK_VERSION} AS ant_build ARG TARGET_DIR=dspace-installer # COPY the /install directory from 'build' container to /dspace-src in this container COPY --from=build /install /dspace-src WORKDIR /dspace-src # Create the initial install deployment using ANT -ENV ANT_VERSION 1.10.12 -ENV ANT_HOME /tmp/ant-$ANT_VERSION -ENV PATH $ANT_HOME/bin:$PATH -# Need wget to install ant -RUN apt-get update \ - && apt-get install -y --no-install-recommends wget \ - && apt-get purge -y --auto-remove \ - && rm -rf /var/lib/apt/lists/* +ENV ANT_VERSION=1.10.12 +ENV ANT_HOME=/tmp/ant-$ANT_VERSION +ENV PATH=$ANT_HOME/bin:$PATH # Download and install 'ant' RUN mkdir $ANT_HOME && \ - wget -qO- "https://archive.apache.org/dist/ant/binaries/apache-ant-$ANT_VERSION-bin.tar.gz" | tar -zx --strip-components=1 -C $ANT_HOME + curl --silent --show-error --location --fail --retry 5 --output /tmp/apache-ant.tar.gz \ + https://archive.apache.org/dist/ant/binaries/apache-ant-${ANT_VERSION}-bin.tar.gz && \ + tar -zx --strip-components=1 -f /tmp/apache-ant.tar.gz -C $ANT_HOME && \ + rm /tmp/apache-ant.tar.gz # Run necessary 'ant' deploy scripts RUN ant init_installation update_configs update_code update_webapps # Step 3 - Start up DSpace via Runnable JAR -FROM eclipse-temurin:${JDK_VERSION} +FROM docker.io/eclipse-temurin:${JDK_VERSION} # NOTE: DSPACE_INSTALL must align with the "dspace.dir" default configuration. 
ENV DSPACE_INSTALL=/dspace # Copy the /dspace directory from 'ant_build' container to /dspace in this container COPY --from=ant_build /dspace $DSPACE_INSTALL WORKDIR $DSPACE_INSTALL +# Need host command for "[dspace]/bin/make-handle-config" +RUN apt-get update \ + && apt-get install -y --no-install-recommends host \ + && apt-get purge -y --auto-remove \ + && rm -rf /var/lib/apt/lists/* # Expose Tomcat port and debugging port EXPOSE 8080 8000 # Give java extra memory (2GB) diff --git a/docker-compose-cli.yml b/docker-compose-cli.yml index 91f89916d208..3e2c9ba6a50a 100644 --- a/docker-compose-cli.yml +++ b/docker-compose-cli.yml @@ -6,7 +6,7 @@ networks: external: true services: dspace-cli: - image: "${DOCKER_OWNER:-dspace}/dspace-cli:${DSPACE_VER:-latest}" + image: "${DOCKER_REGISTRY:-docker.io}/${DOCKER_OWNER:-dspace}/dspace-cli:${DSPACE_VER:-latest}" container_name: dspace-cli build: context: . diff --git a/docker-compose.yml b/docker-compose.yml index 6a930a8d31ec..ab4f8adc98c0 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -28,7 +28,7 @@ services: # from the host machine. This IP range MUST correspond to the 'dspacenet' subnet defined above. proxies__P__trusted__P__ipranges: '172.23.0' LOGGING_CONFIG: /dspace/config/log4j2-container.xml - image: "${DOCKER_OWNER:-dspace}/dspace:${DSPACE_VER:-latest-test}" + image: "${DOCKER_REGISTRY:-docker.io}/${DOCKER_OWNER:-dspace}/dspace:${DSPACE_VER:-latest-test}" build: context: . dockerfile: Dockerfile.test @@ -64,7 +64,7 @@ services: dspacedb: container_name: dspacedb # Uses a custom Postgres image with pgcrypto installed - image: "${DOCKER_OWNER:-dspace}/dspace-postgres-pgcrypto:${DSPACE_VER:-latest}" + image: "${DOCKER_REGISTRY:-docker.io}/${DOCKER_OWNER:-dspace}/dspace-postgres-pgcrypto:${DSPACE_VER:-latest}" build: # Must build out of subdirectory to have access to install script for pgcrypto context: ./dspace/src/main/docker/dspace-postgres-pgcrypto/ @@ -84,7 +84,7 @@ services: # DSpace Solr container dspacesolr: container_name: dspacesolr - image: "${DOCKER_OWNER:-dspace}/dspace-solr:${DSPACE_VER:-latest}" + image: "${DOCKER_REGISTRY:-docker.io}/${DOCKER_OWNER:-dspace}/dspace-solr:${DSPACE_VER:-latest}" build: context: ./dspace/src/main/docker/dspace-solr/ # Provide path to Solr configs necessary to build Docker image diff --git a/dspace-api/pom.xml b/dspace-api/pom.xml index a80f27bc22be..599303275f9f 100644 --- a/dspace-api/pom.xml +++ b/dspace-api/pom.xml @@ -341,6 +341,14 @@ org.apache.logging.log4j log4j-api + + org.apache.logging.log4j + log4j-core + + + org.apache.logging.log4j + log4j-slf4j2-impl + org.hibernate.orm hibernate-core @@ -388,6 +396,13 @@ org.springframework spring-orm + + + + org.springframework + spring-jcl + + @@ -406,6 +421,16 @@ org.mortbay.jasper apache-jsp + + + org.bouncycastle + bcpkix-jdk15on + + + org.bouncycastle + bcprov-jdk15on + @@ -623,7 +648,7 @@ dnsjava dnsjava - 3.6.0 + 3.6.2 @@ -672,22 +697,6 @@ com.google.apis google-api-services-analytics - - com.google.api-client - google-api-client - - - com.google.http-client - google-http-client - - - com.google.http-client - google-http-client-jackson2 - - - com.google.oauth-client - google-oauth-client - @@ -702,7 +711,6 @@ jakarta.inject jakarta.inject-api - 2.0.1 @@ -733,7 +741,7 @@ com.amazonaws aws-java-sdk-s3 - 1.12.261 + 1.12.779 - - - io.netty - netty-buffer - 4.1.114.Final - - - io.netty - netty-transport - 4.1.114.Final - - - io.netty - netty-transport-native-unix-common - 4.1.114.Final - - - io.netty - netty-common - 4.1.114.Final - - 
- io.netty - netty-handler - 4.1.114.Final - - - io.netty - netty-codec - 4.1.114.Final - - - org.apache.velocity - velocity-engine-core - 2.3 - - - org.xmlunit - xmlunit-core - 2.10.0 - test - - - com.github.java-json-tools - json-schema-validator - 2.2.14 - - - jakarta.validation - jakarta.validation-api - 3.1.0 - - - io.swagger - swagger-core - 1.6.2 - - - org.scala-lang - scala-library - 2.13.11 - test - - - - diff --git a/dspace-api/src/main/java/org/dspace/app/mediafilter/TikaTextExtractionFilter.java b/dspace-api/src/main/java/org/dspace/app/mediafilter/TikaTextExtractionFilter.java index e83bf706ed02..17e7b85e9bfc 100644 --- a/dspace-api/src/main/java/org/dspace/app/mediafilter/TikaTextExtractionFilter.java +++ b/dspace-api/src/main/java/org/dspace/app/mediafilter/TikaTextExtractionFilter.java @@ -18,6 +18,7 @@ import org.apache.commons.lang.StringUtils; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; +import org.apache.poi.util.IOUtils; import org.apache.tika.Tika; import org.apache.tika.exception.TikaException; import org.apache.tika.metadata.Metadata; @@ -72,21 +73,23 @@ public InputStream getDestinationStream(Item currentItem, InputStream source, bo // Not using temporary file. We'll use Tika's default in-memory parsing. // Get maximum characters to extract. Default is 100,000 chars, which is also Tika's default setting. String extractedText; - int maxChars = configurationService.getIntProperty("textextractor.max-chars", 100000); + int maxChars = configurationService.getIntProperty("textextractor.max-chars", 100_000); try { // Use Tika to extract text from input. Tika will automatically detect the file type. Tika tika = new Tika(); tika.setMaxStringLength(maxChars); // Tell Tika the maximum number of characters to extract + IOUtils.setByteArrayMaxOverride( + configurationService.getIntProperty("textextractor.max-array", 100_000_000)); extractedText = tika.parseToString(source); } catch (IOException e) { System.err.format("Unable to extract text from bitstream in Item %s%n", currentItem.getID().toString()); - e.printStackTrace(); + e.printStackTrace(System.err); log.error("Unable to extract text from bitstream in Item {}", currentItem.getID().toString(), e); throw e; } catch (OutOfMemoryError oe) { System.err.format("OutOfMemoryError occurred when extracting text from bitstream in Item %s. " + "You may wish to enable 'textextractor.use-temp-file'.%n", currentItem.getID().toString()); - oe.printStackTrace(); + oe.printStackTrace(System.err); log.error("OutOfMemoryError occurred when extracting text from bitstream in Item {}. " + "You may wish to enable 'textextractor.use-temp-file'.", currentItem.getID().toString(), oe); throw oe; diff --git a/dspace-api/src/main/java/org/dspace/app/statistics/package.html b/dspace-api/src/main/java/org/dspace/app/statistics/package.html index a6d8d8699cf7..931a7039080d 100644 --- a/dspace-api/src/main/java/org/dspace/app/statistics/package.html +++ b/dspace-api/src/main/java/org/dspace/app/statistics/package.html @@ -46,8 +46,6 @@
     <dd>writes event records to the Java logger.</dd>
     <dt>{@link org.dspace.statistics.SolrLoggerUsageEventListener SolrLoggerUsageEventListener}</dt>
     <dd>writes event records to Solr.</dd>
-
-    <dt>{@link org.dspace.google.GoogleRecorderEventListener GoogleRecorderEventListener}<.dt>
-    <dd>
writes event records to Google Analytics.
diff --git a/dspace-api/src/main/java/org/dspace/app/util/DCInput.java b/dspace-api/src/main/java/org/dspace/app/util/DCInput.java index dd88390cb856..c96be33d0132 100644 --- a/dspace-api/src/main/java/org/dspace/app/util/DCInput.java +++ b/dspace-api/src/main/java/org/dspace/app/util/DCInput.java @@ -163,7 +163,7 @@ public class DCInput { * The scope of the input sets, this restricts hidden metadata fields from * view by the end user during submission. */ - public static final String SUBMISSION_SCOPE = "submit"; + public static final String SUBMISSION_SCOPE = "submission"; /** * Class constructor for creating a DCInput object based on the contents of @@ -262,7 +262,7 @@ protected void initRegex(String regex) { /** * Is this DCInput for display in the given scope? The scope should be - * either "workflow" or "submit", as per the input forms definition. If the + * either "workflow" or "submission", as per the input forms definition. If the * internal visibility is set to "null" then this will always return true. * * @param scope String identifying the scope that this input's visibility diff --git a/dspace-api/src/main/java/org/dspace/app/util/OpenSearchServiceImpl.java b/dspace-api/src/main/java/org/dspace/app/util/OpenSearchServiceImpl.java index bff741b5ca42..2800ae6d10a2 100644 --- a/dspace-api/src/main/java/org/dspace/app/util/OpenSearchServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/app/util/OpenSearchServiceImpl.java @@ -14,7 +14,6 @@ import java.util.ArrayList; import java.util.Arrays; import java.util.List; -import java.util.Map; import com.rometools.modules.opensearch.OpenSearchModule; import com.rometools.modules.opensearch.entity.OSQuery; @@ -58,12 +57,12 @@ public class OpenSearchServiceImpl implements OpenSearchService { private static final Logger log = org.apache.logging.log4j.LogManager.getLogger(OpenSearchServiceImpl.class); // Namespaces used - protected final String osNs = "http://a9.com/-/spec/opensearch/1.1/"; + protected final static String osNs = "http://a9.com/-/spec/opensearch/1.1/"; - @Autowired(required = true) + @Autowired protected ConfigurationService configurationService; - @Autowired(required = true) + @Autowired protected HandleService handleService; protected OpenSearchServiceImpl() { @@ -119,11 +118,10 @@ public String getDescription(String scope) { @Override public String getResultsString(Context context, String format, String query, int totalResults, int start, - int pageSize, - IndexableObject scope, List results, - Map labels) throws IOException { + int pageSize, IndexableObject scope, List results) + throws IOException { try { - return getResults(context, format, query, totalResults, start, pageSize, scope, results, labels) + return getResults(context, format, query, totalResults, start, pageSize, scope, results) .outputString(); } catch (FeedException e) { log.error(e.toString(), e); @@ -133,11 +131,10 @@ public String getResultsString(Context context, String format, String query, int @Override public Document getResultsDoc(Context context, String format, String query, int totalResults, int start, - int pageSize, - IndexableObject scope, List results, Map labels) + int pageSize, IndexableObject scope, List results) throws IOException { try { - return getResults(context, format, query, totalResults, start, pageSize, scope, results, labels) + return getResults(context, format, query, totalResults, start, pageSize, scope, results) .outputW3CDom(); } catch (FeedException e) { log.error(e.toString(), e); @@ -146,8 +143,7 @@ public Document 
getResultsDoc(Context context, String format, String query, int } protected SyndicationFeed getResults(Context context, String format, String query, int totalResults, int start, - int pageSize, IndexableObject scope, - List results, Map labels) { + int pageSize, IndexableObject scope, List results) { // Encode results in requested format if ("rss".equals(format)) { format = "rss_2.0"; @@ -156,7 +152,7 @@ protected SyndicationFeed getResults(Context context, String format, String quer } SyndicationFeed feed = new SyndicationFeed(); - feed.populate(null, context, scope, results, labels); + feed.populate(null, context, scope, results); feed.setType(format); feed.addModule(openSearchMarkup(query, totalResults, start, pageSize)); return feed; diff --git a/dspace-api/src/main/java/org/dspace/app/util/SyndicationFeed.java b/dspace-api/src/main/java/org/dspace/app/util/SyndicationFeed.java index b4ba69ffa285..5d64009727c6 100644 --- a/dspace-api/src/main/java/org/dspace/app/util/SyndicationFeed.java +++ b/dspace-api/src/main/java/org/dspace/app/util/SyndicationFeed.java @@ -11,6 +11,7 @@ import java.sql.SQLException; import java.util.ArrayList; import java.util.Date; +import java.util.HashMap; import java.util.List; import java.util.Map; @@ -135,8 +136,6 @@ public class SyndicationFeed { protected String[] podcastableMIMETypes = configurationService.getArrayProperty("webui.feed.podcast.mimetypes", new String[] {"audio/x-mpeg"}); - // -------- Instance variables: - // the feed object we are building protected SyndFeed feed = null; @@ -146,9 +145,6 @@ public class SyndicationFeed { protected CommunityService communityService; protected ItemService itemService; - /** - * Constructor. - */ public SyndicationFeed() { feed = new SyndFeedImpl(); ContentServiceFactory contentServiceFactory = ContentServiceFactory.getInstance(); @@ -157,16 +153,6 @@ public SyndicationFeed() { communityService = contentServiceFactory.getCommunityService(); } - /** - * Returns list of metadata selectors used to compose the description element - * - * @return selector list - format 'schema.element[.qualifier]' - */ - public static String[] getDescriptionSelectors() { - return (String[]) ArrayUtils.clone(descriptionFields); - } - - /** * Fills in the feed and entry-level metadata from DSpace objects. * @@ -174,15 +160,17 @@ public static String[] getDescriptionSelectors() { * @param context context * @param dso the scope * @param items array of objects - * @param labels label map */ public void populate(HttpServletRequest request, Context context, IndexableObject dso, - List items, Map labels) { + List items) { String logoURL = null; String objectURL = null; String defaultTitle = null; boolean podcastFeed = false; this.request = request; + + Map labels = getLabels(); + // dso is null for the whole site, or a search without scope if (dso == null) { defaultTitle = configurationService.getProperty("dspace.name"); @@ -553,5 +541,19 @@ protected String getOneDC(Item item, String field) { List dcv = itemService.getMetadataByMetadataString(item, field); return (dcv.size() > 0) ? 
dcv.get(0).getValue() : null; } -} + /** + * Internal method to get labels for the returned document + */ + private Map getLabels() { + // TODO: get strings from translation file or configuration + Map labelMap = new HashMap<>(); + labelMap.put(SyndicationFeed.MSG_UNTITLED, "notitle"); + labelMap.put(SyndicationFeed.MSG_LOGO_TITLE, "logo.title"); + labelMap.put(SyndicationFeed.MSG_FEED_DESCRIPTION, "general-feed.description"); + for (String selector : descriptionFields) { + labelMap.put("metadata." + selector, selector); + } + return labelMap; + } +} diff --git a/dspace-api/src/main/java/org/dspace/app/util/service/OpenSearchService.java b/dspace-api/src/main/java/org/dspace/app/util/service/OpenSearchService.java index cf62bca30e2a..78b208faa2bc 100644 --- a/dspace-api/src/main/java/org/dspace/app/util/service/OpenSearchService.java +++ b/dspace-api/src/main/java/org/dspace/app/util/service/OpenSearchService.java @@ -10,7 +10,6 @@ import java.io.IOException; import java.sql.SQLException; import java.util.List; -import java.util.Map; import org.dspace.content.DSpaceObject; import org.dspace.core.Context; @@ -86,14 +85,12 @@ public interface OpenSearchService { * @param pageSize - page size * @param scope - search scope, null or the community/collection * @param results the retrieved DSpace objects satisfying search - * @param labels labels to apply - format specific * @return formatted search results * @throws IOException if IO error */ public String getResultsString(Context context, String format, String query, int totalResults, int start, - int pageSize, - IndexableObject scope, List results, - Map labels) throws IOException; + int pageSize, IndexableObject scope, List results) + throws IOException; /** * Returns a formatted set of search results as a document @@ -106,13 +103,11 @@ public String getResultsString(Context context, String format, String query, int * @param pageSize - page size * @param scope - search scope, null or the community/collection * @param results the retrieved DSpace objects satisfying search - * @param labels labels to apply - format specific * @return formatted search results * @throws IOException if IO error */ public Document getResultsDoc(Context context, String format, String query, int totalResults, int start, - int pageSize, - IndexableObject scope, List results, Map labels) + int pageSize, IndexableObject scope, List results) throws IOException; public DSpaceObject resolveScope(Context context, String scope) throws SQLException; diff --git a/dspace-api/src/main/java/org/dspace/browse/BrowseEngine.java b/dspace-api/src/main/java/org/dspace/browse/BrowseEngine.java index 351c36248209..be7a34086a46 100644 --- a/dspace-api/src/main/java/org/dspace/browse/BrowseEngine.java +++ b/dspace-api/src/main/java/org/dspace/browse/BrowseEngine.java @@ -422,9 +422,6 @@ private BrowseInfo browseByValue(BrowserScope bs) } } - // this is the total number of results in answer to the query - int total = getTotalResults(true); - // set the ordering field (there is only one option) dao.setOrderField("sort_value"); @@ -444,6 +441,9 @@ private BrowseInfo browseByValue(BrowserScope bs) dao.setOffset(offset); dao.setLimit(scope.getResultsPerPage()); + // this is the total number of results in answer to the query + int total = getTotalResults(true); + // Holder for the results List results = null; @@ -680,33 +680,9 @@ private int getTotalResults(boolean distinct) // tell the browse query whether we are distinct dao.setDistinct(distinct); - // ensure that the select is set to "*" - 
String[] select = {"*"}; - dao.setCountValues(select); - - // FIXME: it would be nice to have a good way of doing this in the DAO - // now reset all of the fields that we don't want to have constraining - // our count, storing them locally to reinstate later - String focusField = dao.getJumpToField(); - String focusValue = dao.getJumpToValue(); - int limit = dao.getLimit(); - int offset = dao.getOffset(); - - dao.setJumpToField(null); - dao.setJumpToValue(null); - dao.setLimit(-1); - dao.setOffset(-1); - // perform the query and get the result int count = dao.doCountQuery(); - // now put back the values we removed for this method - dao.setJumpToField(focusField); - dao.setJumpToValue(focusValue); - dao.setLimit(limit); - dao.setOffset(offset); - dao.setCountValues(null); - log.debug(LogHelper.getHeader(context, "get_total_results_return", "return=" + count)); return count; diff --git a/dspace-api/src/main/java/org/dspace/browse/BrowseIndex.java b/dspace-api/src/main/java/org/dspace/browse/BrowseIndex.java index 0eddfcac9194..ff6de08583be 100644 --- a/dspace-api/src/main/java/org/dspace/browse/BrowseIndex.java +++ b/dspace-api/src/main/java/org/dspace/browse/BrowseIndex.java @@ -543,19 +543,6 @@ public String getDistinctTableName() { return getTableName(false, false, true, false); } - /** - * Get the name of the column that is used to store the default value column - * - * @return the name of the value column - */ - public String getValueColumn() { - if (!isDate()) { - return "sort_text_value"; - } else { - return "text_value"; - } - } - /** * Get the name of the primary key index column * @@ -565,35 +552,6 @@ public String getIndexColumn() { return "id"; } - /** - * Is this browse index type for a title? - * - * @return true if title type, false if not - */ -// public boolean isTitle() -// { -// return "title".equals(getDataType()); -// } - - /** - * Is the browse index type for a date? - * - * @return true if date type, false if not - */ - public boolean isDate() { - return "date".equals(getDataType()); - } - - /** - * Is the browse index type for a plain text type? - * - * @return true if plain text type, false if not - */ -// public boolean isText() -// { -// return "text".equals(getDataType()); -// } - /** * Is the browse index of display type single? 
* diff --git a/dspace-api/src/main/java/org/dspace/browse/SolrBrowseDAO.java b/dspace-api/src/main/java/org/dspace/browse/SolrBrowseDAO.java index f99aab852bf5..1917dec423ec 100644 --- a/dspace-api/src/main/java/org/dspace/browse/SolrBrowseDAO.java +++ b/dspace-api/src/main/java/org/dspace/browse/SolrBrowseDAO.java @@ -13,6 +13,8 @@ import java.util.Comparator; import java.util.List; +import com.fasterxml.jackson.databind.node.JsonNodeFactory; +import com.fasterxml.jackson.databind.node.ObjectNode; import org.apache.commons.lang3.StringUtils; import org.apache.logging.log4j.Logger; import org.apache.solr.client.solrj.util.ClientUtils; @@ -180,18 +182,33 @@ private DiscoverResult getSolrResponse() throws BrowseException { addDefaultFilterQueries(query); if (distinct) { DiscoverFacetField dff; + + // To get the number of distinct values we use the next "json.facet" query param + // {"entries_count": {"type":"terms","field": "_filter", "limit":0, "numBuckets":true}}" + ObjectNode jsonFacet = JsonNodeFactory.instance.objectNode(); + ObjectNode entriesCount = JsonNodeFactory.instance.objectNode(); + entriesCount.put("type", "terms"); + entriesCount.put("field", facetField + "_filter"); + entriesCount.put("limit", 0); + entriesCount.put("numBuckets", true); + jsonFacet.set("entries_count", entriesCount); + if (StringUtils.isNotBlank(startsWith)) { dff = new DiscoverFacetField(facetField, - DiscoveryConfigurationParameters.TYPE_TEXT, -1, - DiscoveryConfigurationParameters.SORT.VALUE, startsWith); + DiscoveryConfigurationParameters.TYPE_TEXT, limit, + DiscoveryConfigurationParameters.SORT.VALUE, startsWith, offset); + + // Add the prefix to the json facet query + entriesCount.put("prefix", startsWith); } else { dff = new DiscoverFacetField(facetField, - DiscoveryConfigurationParameters.TYPE_TEXT, -1, - DiscoveryConfigurationParameters.SORT.VALUE); + DiscoveryConfigurationParameters.TYPE_TEXT, limit, + DiscoveryConfigurationParameters.SORT.VALUE, offset); } query.addFacetField(dff); query.setFacetMinCount(1); query.setMaxResults(0); + query.addProperty("json.facet", jsonFacet.toString()); } else { query.setMaxResults(limit/* > 0 ? limit : 20*/); if (offset > 0) { @@ -248,8 +265,7 @@ public int doCountQuery() throws BrowseException { DiscoverResult resp = getSolrResponse(); int count = 0; if (distinct) { - List facetResults = resp.getFacetResult(facetField); - count = facetResults.size(); + count = (int) resp.getTotalEntries(); } else { // we need to cast to int to respect the BrowseDAO contract... count = (int) resp.getTotalSearchResults(); @@ -266,8 +282,8 @@ public List doValueQuery() throws BrowseException { DiscoverResult resp = getSolrResponse(); List facet = resp.getFacetResult(facetField); int count = doCountQuery(); - int start = offset > 0 ? offset : 0; - int max = limit > 0 ? 
limit : count; //if negative, return everything + int start = 0; + int max = facet.size(); List result = new ArrayList<>(); if (ascending) { for (int i = start; i < (start + max) && i < count; i++) { diff --git a/dspace-api/src/main/java/org/dspace/content/authority/DSpaceControlledVocabulary.java b/dspace-api/src/main/java/org/dspace/content/authority/DSpaceControlledVocabulary.java index 444332df97d2..4e30559e1c92 100644 --- a/dspace-api/src/main/java/org/dspace/content/authority/DSpaceControlledVocabulary.java +++ b/dspace-api/src/main/java/org/dspace/content/authority/DSpaceControlledVocabulary.java @@ -8,6 +8,7 @@ package org.dspace.content.authority; import java.io.File; +import java.nio.file.Paths; import java.util.ArrayList; import java.util.Arrays; import java.util.HashMap; @@ -65,14 +66,17 @@ public class DSpaceControlledVocabulary extends SelfNamedPlugin implements Hiera protected static String labelTemplate = "//node[@label = '%s']"; protected static String idParentTemplate = "//node[@id = '%s']/parent::isComposedBy/parent::node"; protected static String rootTemplate = "/node"; + protected static String idAttribute = "id"; + protected static String labelAttribute = "label"; protected static String pluginNames[] = null; - protected String vocabularyName = null; protected InputSource vocabulary = null; protected Boolean suggestHierarchy = false; protected Boolean storeHierarchy = true; protected String hierarchyDelimiter = "::"; protected Integer preloadLevel = 1; + protected String valueAttribute = labelAttribute; + protected String valueTemplate = labelTemplate; public DSpaceControlledVocabulary() { super(); @@ -115,7 +119,7 @@ public boolean accept(File dir, String name) { } } - protected void init() { + protected void init(String locale) { if (vocabulary == null) { ConfigurationService config = DSpaceServicesFactory.getInstance().getConfigurationService(); @@ -125,13 +129,25 @@ protected void init() { File.separator + "controlled-vocabularies" + File.separator; String configurationPrefix = "vocabulary.plugin." 
+ vocabularyName; storeHierarchy = config.getBooleanProperty(configurationPrefix + ".hierarchy.store", storeHierarchy); + boolean storeIDs = config.getBooleanProperty(configurationPrefix + ".storeIDs", false); suggestHierarchy = config.getBooleanProperty(configurationPrefix + ".hierarchy.suggest", suggestHierarchy); preloadLevel = config.getIntProperty(configurationPrefix + ".hierarchy.preloadLevel", preloadLevel); String configuredDelimiter = config.getProperty(configurationPrefix + ".delimiter"); if (configuredDelimiter != null) { hierarchyDelimiter = configuredDelimiter.replaceAll("(^\"|\"$)", ""); } + if (storeIDs) { + valueAttribute = idAttribute; + valueTemplate = idTemplate; + } + String filename = vocabulariesPath + vocabularyName + ".xml"; + if (StringUtils.isNotEmpty(locale)) { + String localizedFilename = vocabulariesPath + vocabularyName + "_" + locale + ".xml"; + if (Paths.get(localizedFilename).toFile().exists()) { + filename = localizedFilename; + } + } log.info("Loading " + filename); vocabulary = new InputSource(filename); } @@ -144,9 +160,9 @@ protected String buildString(Node node) { return (""); } else { String parentValue = buildString(node.getParentNode()); - Node currentLabel = node.getAttributes().getNamedItem("label"); - if (currentLabel != null) { - String currentValue = currentLabel.getNodeValue(); + Node currentNodeValue = node.getAttributes().getNamedItem(valueAttribute); + if (currentNodeValue != null) { + String currentValue = currentNodeValue.getNodeValue(); if (parentValue.equals("")) { return currentValue; } else { @@ -160,12 +176,13 @@ protected String buildString(Node node) { @Override public Choices getMatches(String text, int start, int limit, String locale) { - init(); + init(locale); log.debug("Getting matches for '" + text + "'"); String xpathExpression = ""; String[] textHierarchy = text.split(hierarchyDelimiter, -1); for (int i = 0; i < textHierarchy.length; i++) { - xpathExpression += String.format(xpathTemplate, textHierarchy[i].replaceAll("'", "'").toLowerCase()); + xpathExpression += + String.format(xpathTemplate, textHierarchy[i].replaceAll("'", "'").toLowerCase()); } XPath xpath = XPathFactory.newInstance().newXPath(); int total = 0; @@ -184,12 +201,13 @@ public Choices getMatches(String text, int start, int limit, String locale) { @Override public Choices getBestMatch(String text, String locale) { - init(); + init(locale); log.debug("Getting best matches for '" + text + "'"); String xpathExpression = ""; String[] textHierarchy = text.split(hierarchyDelimiter, -1); for (int i = 0; i < textHierarchy.length; i++) { - xpathExpression += String.format(labelTemplate, textHierarchy[i].replaceAll("'", "'")); + xpathExpression += + String.format(valueTemplate, textHierarchy[i].replaceAll("'", "'")); } XPath xpath = XPathFactory.newInstance().newXPath(); List choices = new ArrayList(); @@ -205,19 +223,19 @@ public Choices getBestMatch(String text, String locale) { @Override public String getLabel(String key, String locale) { - return getNodeLabel(key, this.suggestHierarchy); + return getNodeValue(key, locale, this.suggestHierarchy); } @Override public String getValue(String key, String locale) { - return getNodeLabel(key, this.storeHierarchy); + return getNodeValue(key, locale, this.storeHierarchy); } @Override public Choice getChoice(String authKey, String locale) { Node node; try { - node = getNode(authKey); + node = getNode(authKey, locale); } catch (XPathExpressionException e) { return null; } @@ -226,27 +244,27 @@ public Choice getChoice(String 
authKey, String locale) { @Override public boolean isHierarchical() { - init(); + init(null); return true; } @Override public Choices getTopChoices(String authorityName, int start, int limit, String locale) { - init(); + init(locale); String xpathExpression = rootTemplate; return getChoicesByXpath(xpathExpression, start, limit); } @Override public Choices getChoicesByParent(String authorityName, String parentId, int start, int limit, String locale) { - init(); + init(locale); String xpathExpression = String.format(idTemplate, parentId); return getChoicesByXpath(xpathExpression, start, limit); } @Override public Choice getParentChoice(String authorityName, String childId, String locale) { - init(); + init(locale); try { String xpathExpression = String.format(idParentTemplate, childId); Choice choice = createChoiceFromNode(getNodeFromXPath(xpathExpression)); @@ -259,7 +277,7 @@ public Choice getParentChoice(String authorityName, String childId, String local @Override public Integer getPreloadLevel() { - init(); + init(null); return preloadLevel; } @@ -270,8 +288,8 @@ private boolean isRootElement(Node node) { return false; } - private Node getNode(String key) throws XPathExpressionException { - init(); + private Node getNode(String key, String locale) throws XPathExpressionException { + init(locale); String xpathExpression = String.format(idTemplate, key); Node node = getNodeFromXPath(xpathExpression); return node; @@ -319,16 +337,16 @@ private Map addOtherInformation(String parentCurr, String noteCu return extras; } - private String getNodeLabel(String key, boolean useHierarchy) { + private String getNodeValue(String key, String locale, boolean useHierarchy) { try { - Node node = getNode(key); + Node node = getNode(key, locale); if (Objects.isNull(node)) { return null; } if (useHierarchy) { return this.buildString(node); } else { - return node.getAttributes().getNamedItem("label").getNodeValue(); + return node.getAttributes().getNamedItem(valueAttribute).getNodeValue(); } } catch (XPathExpressionException e) { return (""); @@ -349,7 +367,7 @@ private String getValue(Node node) { if (this.storeHierarchy) { return hierarchy; } else { - return node.getAttributes().getNamedItem("label").getNodeValue(); + return node.getAttributes().getNamedItem(valueAttribute).getNodeValue(); } } diff --git a/dspace-api/src/main/java/org/dspace/ctask/general/BasicLinkChecker.java b/dspace-api/src/main/java/org/dspace/ctask/general/BasicLinkChecker.java index 756d65a0ad09..9bd08bffdbd2 100644 --- a/dspace-api/src/main/java/org/dspace/ctask/general/BasicLinkChecker.java +++ b/dspace-api/src/main/java/org/dspace/ctask/general/BasicLinkChecker.java @@ -19,6 +19,8 @@ import org.dspace.content.MetadataValue; import org.dspace.curate.AbstractCurationTask; import org.dspace.curate.Curator; +import org.dspace.services.ConfigurationService; +import org.dspace.services.factory.DSpaceServicesFactory; /** * A basic link checker that is designed to be extended. By default this link checker @@ -42,6 +44,9 @@ public class BasicLinkChecker extends AbstractCurationTask { // The log4j logger for this class private static Logger log = org.apache.logging.log4j.LogManager.getLogger(BasicLinkChecker.class); + protected static final ConfigurationService configurationService + = DSpaceServicesFactory.getInstance().getConfigurationService(); + /** * Perform the link checking. 
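For reference, the locale-aware lookup introduced in DSpaceControlledVocabulary above can be exercised roughly the way the integration tests later in this diff do. This is a minimal sketch only, assuming the "countries" test vocabulary with a German translation file (countries_de.xml) and vocabulary.plugin.countries.storeIDs=true; it is not part of the change itself.

    // Sketch only: mirrors the "countries" fixtures added later in this diff.
    import org.dspace.content.authority.Choices;
    import org.dspace.content.authority.DSpaceControlledVocabulary;
    import org.dspace.core.factory.CoreServiceFactory;

    public class VocabularyLocaleSketch {
        public static void main(String[] args) throws ClassNotFoundException {
            DSpaceControlledVocabulary countries = (DSpaceControlledVocabulary)
                CoreServiceFactory.getInstance().getPluginService().getNamedPlugin(
                    Class.forName("org.dspace.content.authority.ChoiceAuthority"), "countries");

            // init("de") picks up countries_de.xml when it exists ...
            Choices german = countries.getMatches("Alge", 0, 10, "de");
            // ... while a null locale falls back to the default countries.xml
            Choices fallback = countries.getMatches("Alge", 0, 10, null);

            // With storeIDs=true the stored value is the node id attribute rather than the label
            System.out.println(german.values[0].label + " vs " + fallback.values[0].label
                + ", stored value " + german.values[0].value);
        }
    }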
@@ -110,7 +115,8 @@ protected List getURLs(Item item) { */ protected boolean checkURL(String url, StringBuilder results) { // Link check the URL - int httpStatus = getResponseStatus(url); + int redirects = 0; + int httpStatus = getResponseStatus(url, redirects); if ((httpStatus >= 200) && (httpStatus < 300)) { results.append(" - " + url + " = " + httpStatus + " - OK\n"); @@ -128,14 +134,24 @@ protected boolean checkURL(String url, StringBuilder results) { * @param url The url to open * @return The HTTP response code (e.g. 200 / 301 / 404 / 500) */ - protected int getResponseStatus(String url) { + protected int getResponseStatus(String url, int redirects) { try { URL theURL = new URL(url); HttpURLConnection connection = (HttpURLConnection) theURL.openConnection(); - int code = connection.getResponseCode(); - connection.disconnect(); + connection.setInstanceFollowRedirects(true); + int statusCode = connection.getResponseCode(); + int maxRedirect = configurationService.getIntProperty("curate.checklinks.max-redirect", 0); + if ((statusCode == HttpURLConnection.HTTP_MOVED_TEMP || statusCode == HttpURLConnection.HTTP_MOVED_PERM || + statusCode == HttpURLConnection.HTTP_SEE_OTHER)) { + connection.disconnect(); + String newUrl = connection.getHeaderField("Location"); + if (newUrl != null && (maxRedirect >= redirects || maxRedirect == -1)) { + redirects++; + return getResponseStatus(newUrl, redirects); + } - return code; + } + return statusCode; } catch (IOException ioe) { // Must be a bad URL diff --git a/dspace-api/src/main/java/org/dspace/discovery/DiscoverResult.java b/dspace-api/src/main/java/org/dspace/discovery/DiscoverResult.java index 00236d2bfe32..a56804e3e7ea 100644 --- a/dspace-api/src/main/java/org/dspace/discovery/DiscoverResult.java +++ b/dspace-api/src/main/java/org/dspace/discovery/DiscoverResult.java @@ -32,6 +32,9 @@ public class DiscoverResult { private List indexableObjects; private Map> facetResults; + // Total count of facet entries calculated for a metadata browsing query + private long totalEntries; + /** * A map that contains all the documents sougth after, the key is a string representation of the Indexable Object */ @@ -64,6 +67,14 @@ public void setTotalSearchResults(long totalSearchResults) { this.totalSearchResults = totalSearchResults; } + public long getTotalEntries() { + return totalEntries; + } + + public void setTotalEntries(long totalEntries) { + this.totalEntries = totalEntries; + } + public int getStart() { return start; } diff --git a/dspace-api/src/main/java/org/dspace/discovery/SolrServiceImpl.java b/dspace-api/src/main/java/org/dspace/discovery/SolrServiceImpl.java index 67891f122b25..eccc2b7e9a67 100644 --- a/dspace-api/src/main/java/org/dspace/discovery/SolrServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/discovery/SolrServiceImpl.java @@ -1055,6 +1055,8 @@ protected DiscoverResult retrieveResult(Context context, DiscoverQuery query) } //Resolve our facet field values resolveFacetFields(context, query, result, skipLoadingResponse, solrQueryResponse); + //Add total entries count for metadata browsing + resolveEntriesCount(result, solrQueryResponse); } // If any stale entries are found in the current page of results, // we remove those stale entries and rerun the same query again. @@ -1080,7 +1082,39 @@ protected DiscoverResult retrieveResult(Context context, DiscoverQuery query) return result; } - + /** + * Stores the total count of entries for metadata index browsing. 
The count is calculated by the + * json.facet parameter with the following value: + * + * <pre><code>
+     * {
+     *     "entries_count": {
+     *         "type": "terms",
+     *         "field": "facetNameField_filter",
+     *         "limit": 0,
+     *         "prefix": "prefix_value",
+     *         "numBuckets": true
+     *     }
+     * }
+     * </code></pre>
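For context, a json.facet block like the one documented above is attached to the Solr request as a raw request parameter. The following is a rough SolrJ sketch, not code from this change; the field name and prefix are placeholder values.

    // Rough sketch of sending a json.facet request with SolrJ; "subject_filter" and
    // the prefix "hist" are placeholders, not values used by DSpace.
    import org.apache.solr.client.solrj.SolrQuery;

    public class JsonFacetSketch {
        public static SolrQuery buildQuery() {
            SolrQuery solrQuery = new SolrQuery("*:*");
            solrQuery.setRows(0); // only the bucket count is of interest, not documents
            solrQuery.add("json.facet",
                "{\"entries_count\":{"
                    + "\"type\":\"terms\","
                    + "\"field\":\"subject_filter\","
                    + "\"limit\":0,"
                    + "\"prefix\":\"hist\","
                    + "\"numBuckets\":true}}");
            return solrQuery;
        }
    }

The numBuckets count then comes back under the response's top-level facets element, which is what resolveEntriesCount below reads into DiscoverResult.totalEntries.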
+ * + * This value is returned in the facets field of the Solr response. + * + * @param result DiscoverResult object where the total entries count will be stored + * @param solrQueryResponse QueryResponse object containing the solr response + */ + private void resolveEntriesCount(DiscoverResult result, QueryResponse solrQueryResponse) { + + Object facetsObj = solrQueryResponse.getResponse().get("facets"); + if (facetsObj instanceof NamedList) { + NamedList facets = (NamedList) facetsObj; + Object bucketsInfoObj = facets.get("entries_count"); + if (bucketsInfoObj instanceof NamedList) { + NamedList bucketsInfo = (NamedList) bucketsInfoObj; + result.setTotalEntries((int) bucketsInfo.get("numBuckets")); + } + } + } private void resolveFacetFields(Context context, DiscoverQuery query, DiscoverResult result, boolean skipLoadingResponse, QueryResponse solrQueryResponse) throws SQLException { @@ -1411,8 +1445,6 @@ protected String transformFacetField(DiscoverFacetField facetFieldConfig, String } else { return field + "_acid"; } - } else if (facetFieldConfig.getType().equals(DiscoveryConfigurationParameters.TYPE_STANDARD)) { - return field; } else { return field; } diff --git a/dspace-api/src/main/java/org/dspace/eperson/Group2GroupCache.java b/dspace-api/src/main/java/org/dspace/eperson/Group2GroupCache.java index a1c12371f5ff..0c6ea58b1977 100644 --- a/dspace-api/src/main/java/org/dspace/eperson/Group2GroupCache.java +++ b/dspace-api/src/main/java/org/dspace/eperson/Group2GroupCache.java @@ -15,6 +15,7 @@ import jakarta.persistence.JoinColumn; import jakarta.persistence.ManyToOne; import jakarta.persistence.Table; +import jakarta.persistence.UniqueConstraint; import org.dspace.core.HibernateProxyHelper; /** @@ -23,7 +24,7 @@ * @author kevinvandevelde at atmire.com */ @Entity -@Table(name = "group2groupcache") +@Table(name = "group2groupcache", uniqueConstraints = { @UniqueConstraint(columnNames = {"parent_id", "child_id"}) }) public class Group2GroupCache implements Serializable { @Id diff --git a/dspace-api/src/main/java/org/dspace/eperson/GroupServiceImpl.java b/dspace-api/src/main/java/org/dspace/eperson/GroupServiceImpl.java index 3fb20e2f1e6f..4cec4c9c0d93 100644 --- a/dspace-api/src/main/java/org/dspace/eperson/GroupServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/eperson/GroupServiceImpl.java @@ -20,6 +20,7 @@ import java.util.UUID; import org.apache.commons.collections4.CollectionUtils; +import org.apache.commons.collections4.SetUtils; import org.apache.commons.lang3.StringUtils; import org.apache.commons.lang3.tuple.Pair; import org.apache.logging.log4j.LogManager; @@ -673,15 +674,14 @@ protected boolean isEPersonInGroup(Context context, Group group, EPerson ePerson /** - * Regenerate the group cache AKA the group2groupcache table in the database - - * meant to be called when a group is added or removed from another group + * Returns a set with pairs of parent and child group UUIDs, representing the new cache table rows. * - * @param context The relevant DSpace Context. - * @param flushQueries flushQueries Flush all pending queries + * @param context The relevant DSpace Context. + * @param flushQueries flushQueries Flush all pending queries + * @return Pairs of parent and child group UUID of the new cache. * @throws SQLException An exception that provides information on a database access error or other errors. 
*/ - protected void rethinkGroupCache(Context context, boolean flushQueries) throws SQLException { - + private Set> computeNewCache(Context context, boolean flushQueries) throws SQLException { Map> parents = new HashMap<>(); List> group2groupResults = groupDAO.getGroup2GroupResults(context, flushQueries); @@ -689,19 +689,8 @@ protected void rethinkGroupCache(Context context, boolean flushQueries) throws S UUID parent = group2groupResult.getLeft(); UUID child = group2groupResult.getRight(); - // if parent doesn't have an entry, create one - if (!parents.containsKey(parent)) { - Set children = new HashSet<>(); - - // add child id to the list - children.add(child); - parents.put(parent, children); - } else { - // parent has an entry, now add the child to the parent's record - // of children - Set children = parents.get(parent); - children.add(child); - } + parents.putIfAbsent(parent, new HashSet<>()); + parents.get(parent).add(child); } // now parents is a hash of all of the IDs of groups that are parents @@ -714,27 +703,42 @@ protected void rethinkGroupCache(Context context, boolean flushQueries) throws S parent.getValue().addAll(myChildren); } - // empty out group2groupcache table - group2GroupCacheDAO.deleteAll(context); - - // write out new one + // write out new cache IN MEMORY ONLY and returns it + Set> newCache = new HashSet<>(); for (Map.Entry> parent : parents.entrySet()) { UUID key = parent.getKey(); - for (UUID child : parent.getValue()) { + newCache.add(Pair.of(key, child)); + } + } + return newCache; + } - Group parentGroup = find(context, key); - Group childGroup = find(context, child); + /** + * Regenerate the group cache AKA the group2groupcache table in the database - + * meant to be called when a group is added or removed from another group + * + * @param context The relevant DSpace Context. + * @param flushQueries flushQueries Flush all pending queries + * @throws SQLException An exception that provides information on a database access error or other errors. 
+ */ + protected void rethinkGroupCache(Context context, boolean flushQueries) throws SQLException { + // current cache in the database + Set> oldCache = group2GroupCacheDAO.getCache(context); - if (parentGroup != null && childGroup != null && group2GroupCacheDAO - .find(context, parentGroup, childGroup) == null) { - Group2GroupCache group2GroupCache = group2GroupCacheDAO.create(context, new Group2GroupCache()); - group2GroupCache.setParent(parentGroup); - group2GroupCache.setChild(childGroup); - group2GroupCacheDAO.save(context, group2GroupCache); - } - } + // correct cache, computed from the Group table + Set> newCache = computeNewCache(context, flushQueries); + + SetUtils.SetView> toDelete = SetUtils.difference(oldCache, newCache); + SetUtils.SetView> toCreate = SetUtils.difference(newCache, oldCache); + + for (Pair pair : toDelete ) { + group2GroupCacheDAO.deleteFromCache(context, pair.getLeft(), pair.getRight()); + } + + for (Pair pair : toCreate ) { + group2GroupCacheDAO.addToCache(context, pair.getLeft(), pair.getRight()); } } diff --git a/dspace-api/src/main/java/org/dspace/eperson/dao/Group2GroupCacheDAO.java b/dspace-api/src/main/java/org/dspace/eperson/dao/Group2GroupCacheDAO.java index 7db569a59e2b..d41d52c7e618 100644 --- a/dspace-api/src/main/java/org/dspace/eperson/dao/Group2GroupCacheDAO.java +++ b/dspace-api/src/main/java/org/dspace/eperson/dao/Group2GroupCacheDAO.java @@ -9,7 +9,10 @@ import java.sql.SQLException; import java.util.List; +import java.util.Set; +import java.util.UUID; +import org.apache.commons.lang3.tuple.Pair; import org.dspace.core.Context; import org.dspace.core.GenericDAO; import org.dspace.eperson.Group; @@ -25,13 +28,74 @@ */ public interface Group2GroupCacheDAO extends GenericDAO { - public List findByParent(Context context, Group group) throws SQLException; + /** + * Returns the current cache table as a set of UUID pairs. + * @param context The relevant DSpace Context. + * @return Set of UUID pairs, where the first element is the parent UUID and the second one is the child UUID. + * @throws SQLException An exception that provides information on a database access error or other errors. + */ + Set> getCache(Context context) throws SQLException; - public List findByChildren(Context context, Iterable groups) throws SQLException; + /** + * Returns all cache entities that are children of a given parent Group entity. + * @param context The relevant DSpace Context. + * @param group Parent group to perform the search. + * @return List of cached groups that are children of the parent group. + * @throws SQLException An exception that provides information on a database access error or other errors. + */ + List findByParent(Context context, Group group) throws SQLException; - public Group2GroupCache findByParentAndChild(Context context, Group parent, Group child) throws SQLException; + /** + * Returns all cache entities that are parents of at least one group from a children groups list. + * @param context The relevant DSpace Context. + * @param groups Children groups to perform the search. + * @return List of cached groups that are parents of at least one group from the children groups list. + * @throws SQLException An exception that provides information on a database access error or other errors. + */ + List findByChildren(Context context, Iterable groups) throws SQLException; - public Group2GroupCache find(Context context, Group parent, Group child) throws SQLException; + /** + * Returns the cache entity given specific parent and child groups. 
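The rewritten rethinkGroupCache above boils down to two set differences over (parent, child) UUID pairs, so only rows that actually changed are written. A self-contained sketch of that delta computation, using made-up UUIDs:

    // Illustration of the delta used above: rows present only in the old cache are deleted,
    // rows present only in the new cache are inserted, everything else is left untouched.
    import java.util.Set;
    import java.util.UUID;
    import org.apache.commons.collections4.SetUtils;
    import org.apache.commons.lang3.tuple.Pair;

    public class GroupCacheDeltaSketch {
        public static void main(String[] args) {
            UUID parent = UUID.randomUUID();
            UUID kept = UUID.randomUUID();
            UUID removed = UUID.randomUUID();
            UUID added = UUID.randomUUID();

            Set<Pair<UUID, UUID>> oldCache = Set.of(Pair.of(parent, kept), Pair.of(parent, removed));
            Set<Pair<UUID, UUID>> newCache = Set.of(Pair.of(parent, kept), Pair.of(parent, added));

            SetUtils.SetView<Pair<UUID, UUID>> toDelete = SetUtils.difference(oldCache, newCache);
            SetUtils.SetView<Pair<UUID, UUID>> toCreate = SetUtils.difference(newCache, oldCache);

            System.out.println("delete " + toDelete.size() + " row(s), create " + toCreate.size() + " row(s)");
        }
    }

Compared with the previous deleteAll-and-rebuild, an unchanged group hierarchy now results in no writes to group2groupcache at all.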
+ * @param context The relevant DSpace Context. + * @param parent Parent group. + * @param child Child gruoup. + * @return Cached group. + * @throws SQLException An exception that provides information on a database access error or other errors. + */ + Group2GroupCache findByParentAndChild(Context context, Group parent, Group child) throws SQLException; - public void deleteAll(Context context) throws SQLException; + /** + * Returns the cache entity given specific parent and child groups. + * @param context The relevant DSpace Context. + * @param parent Parent group. + * @param child Child gruoup. + * @return Cached group. + * @throws SQLException An exception that provides information on a database access error or other errors. + */ + Group2GroupCache find(Context context, Group parent, Group child) throws SQLException; + + /** + * Completely deletes the current cache table. + * @param context The relevant DSpace Context. + * @throws SQLException An exception that provides information on a database access error or other errors. + */ + void deleteAll(Context context) throws SQLException; + + /** + * Deletes a specific cache row given parent and child groups UUIDs. + * @param context The relevant DSpace Context. + * @param parent Parent group UUID. + * @param child Child group UUID. + * @throws SQLException An exception that provides information on a database access error or other errors. + */ + void deleteFromCache(Context context, UUID parent, UUID child) throws SQLException; + + /** + * Adds a single row to the cache table given parent and child groups UUIDs. + * @param context The relevant DSpace Context. + * @param parent Parent group UUID. + * @param child Child group UUID. + * @throws SQLException An exception that provides information on a database access error or other errors. 
+ */ + void addToCache(Context context, UUID parent, UUID child) throws SQLException; } diff --git a/dspace-api/src/main/java/org/dspace/eperson/dao/impl/Group2GroupCacheDAOImpl.java b/dspace-api/src/main/java/org/dspace/eperson/dao/impl/Group2GroupCacheDAOImpl.java index 1cd359188ca3..adbd776ffab6 100644 --- a/dspace-api/src/main/java/org/dspace/eperson/dao/impl/Group2GroupCacheDAOImpl.java +++ b/dspace-api/src/main/java/org/dspace/eperson/dao/impl/Group2GroupCacheDAOImpl.java @@ -8,14 +8,18 @@ package org.dspace.eperson.dao.impl; import java.sql.SQLException; +import java.util.HashSet; import java.util.LinkedList; import java.util.List; +import java.util.Set; +import java.util.UUID; import jakarta.persistence.Query; import jakarta.persistence.criteria.CriteriaBuilder; import jakarta.persistence.criteria.CriteriaQuery; import jakarta.persistence.criteria.Predicate; import jakarta.persistence.criteria.Root; +import org.apache.commons.lang3.tuple.Pair; import org.dspace.core.AbstractHibernateDAO; import org.dspace.core.Context; import org.dspace.eperson.Group; @@ -35,6 +39,16 @@ protected Group2GroupCacheDAOImpl() { super(); } + @Override + public Set> getCache(Context context) throws SQLException { + Query query = createQuery( + context, + "SELECT new org.apache.commons.lang3.tuple.ImmutablePair(g.parent.id, g.child.id) FROM Group2GroupCache g" + ); + List> results = query.getResultList(); + return new HashSet>(results); + } + @Override public List findByParent(Context context, Group group) throws SQLException { CriteriaBuilder criteriaBuilder = getCriteriaBuilder(context); @@ -90,4 +104,24 @@ public Group2GroupCache find(Context context, Group parent, Group child) throws public void deleteAll(Context context) throws SQLException { createQuery(context, "delete from Group2GroupCache").executeUpdate(); } + + @Override + public void deleteFromCache(Context context, UUID parent, UUID child) throws SQLException { + Query query = getHibernateSession(context).createNativeQuery( + "delete from group2groupcache g WHERE g.parent_id = :parent AND g.child_id = :child" + ); + query.setParameter("parent", parent); + query.setParameter("child", child); + query.executeUpdate(); + } + + @Override + public void addToCache(Context context, UUID parent, UUID child) throws SQLException { + Query query = getHibernateSession(context).createNativeQuery( + "insert into group2groupcache (parent_id, child_id) VALUES (:parent, :child)" + ); + query.setParameter("parent", parent); + query.setParameter("child", child); + query.executeUpdate(); + } } diff --git a/dspace-api/src/main/java/org/dspace/google/GoogleAccount.java b/dspace-api/src/main/java/org/dspace/google/GoogleAccount.java deleted file mode 100644 index a24c02a2e1c3..000000000000 --- a/dspace-api/src/main/java/org/dspace/google/GoogleAccount.java +++ /dev/null @@ -1,144 +0,0 @@ -/** - * The contents of this file are subject to the license and copyright - * detailed in the LICENSE and NOTICE files at the root of the source - * tree and available online at - * - * http://www.dspace.org/license/ - */ - -package org.dspace.google; - -import java.io.File; -import java.util.HashSet; -import java.util.Set; - -import com.google.api.client.auth.oauth2.Credential; -import com.google.api.client.googleapis.auth.oauth2.GoogleCredential; -import com.google.api.client.googleapis.javanet.GoogleNetHttpTransport; -import com.google.api.client.http.HttpTransport; -import com.google.api.client.json.JsonFactory; -import com.google.api.client.json.jackson2.JacksonFactory; 
-import com.google.api.services.analytics.Analytics; -import com.google.api.services.analytics.AnalyticsScopes; -import org.apache.logging.log4j.Logger; -import org.dspace.services.factory.DSpaceServicesFactory; - -/** - * User: Robin Taylor - * Date: 11/07/2014 - * Time: 13:23 - */ - -public class GoogleAccount { - - // Read from config - private String applicationName; - private String tableId; - private String emailAddress; - private String certificateLocation; - - // Created from factories - private JsonFactory jsonFactory; - private HttpTransport httpTransport; - - // The Google stuff - private Credential credential; - private Analytics client; - - private volatile static GoogleAccount uniqueInstance; - - private static Logger log = org.apache.logging.log4j.LogManager.getLogger(GoogleAccount.class); - - - private GoogleAccount() { - applicationName = DSpaceServicesFactory.getInstance().getConfigurationService() - .getProperty("google-analytics.application.name"); - tableId = DSpaceServicesFactory.getInstance().getConfigurationService() - .getProperty("google-analytics.table.id"); - emailAddress = DSpaceServicesFactory.getInstance().getConfigurationService() - .getProperty("google-analytics.account.email"); - certificateLocation = DSpaceServicesFactory.getInstance().getConfigurationService() - .getProperty("google-analytics.certificate.location"); - - jsonFactory = JacksonFactory.getDefaultInstance(); - - try { - httpTransport = GoogleNetHttpTransport.newTrustedTransport(); - credential = authorize(); - } catch (Exception e) { - throw new RuntimeException("Error initialising Google Analytics client", e); - } - - // Create an Analytics instance - client = new Analytics.Builder(httpTransport, jsonFactory, credential).setApplicationName(applicationName) - .build(); - - log.info("Google Analytics client successfully initialised"); - } - - public static GoogleAccount getInstance() { - if (uniqueInstance == null) { - synchronized (GoogleAccount.class) { - if (uniqueInstance == null) { - uniqueInstance = new GoogleAccount(); - } - } - } - - return uniqueInstance; - } - - private Credential authorize() throws Exception { - Set scopes = new HashSet(); - scopes.add(AnalyticsScopes.ANALYTICS); - scopes.add(AnalyticsScopes.ANALYTICS_EDIT); - scopes.add(AnalyticsScopes.ANALYTICS_MANAGE_USERS); - scopes.add(AnalyticsScopes.ANALYTICS_PROVISION); - scopes.add(AnalyticsScopes.ANALYTICS_READONLY); - - credential = new GoogleCredential.Builder() - .setTransport(httpTransport) - .setJsonFactory(jsonFactory) - .setServiceAccountId(emailAddress) - .setServiceAccountScopes(scopes) - .setServiceAccountPrivateKeyFromP12File(new File(certificateLocation)) - .build(); - - return credential; - } - - - public String getApplicationName() { - return applicationName; - } - - public String getTableId() { - return tableId; - } - - public String getEmailAddress() { - return emailAddress; - } - - public String getCertificateLocation() { - return certificateLocation; - } - - public JsonFactory getJsonFactory() { - return jsonFactory; - } - - public HttpTransport getHttpTransport() { - return httpTransport; - } - - public Credential getCredential() { - return credential; - } - - public Analytics getClient() { - return client; - } - -} - diff --git a/dspace-api/src/main/java/org/dspace/google/GoogleQueryManager.java b/dspace-api/src/main/java/org/dspace/google/GoogleQueryManager.java deleted file mode 100644 index 2719aef04da4..000000000000 --- a/dspace-api/src/main/java/org/dspace/google/GoogleQueryManager.java +++ 
/dev/null @@ -1,49 +0,0 @@ -/** - * The contents of this file are subject to the license and copyright - * detailed in the LICENSE and NOTICE files at the root of the source - * tree and available online at - * - * http://www.dspace.org/license/ - */ - -package org.dspace.google; - -import java.io.IOException; - -import com.google.api.services.analytics.model.GaData; - - -/** - * User: Robin Taylor - * Date: 20/08/2014 - * Time: 09:26 - */ -public class GoogleQueryManager { - - public GaData getPageViews(String startDate, String endDate, String handle) throws IOException { - return GoogleAccount.getInstance().getClient().data().ga().get( - GoogleAccount.getInstance().getTableId(), - startDate, - endDate, - "ga:pageviews") // Metrics. - .setDimensions("ga:year,ga:month") - .setSort("-ga:year,-ga:month") - .setFilters("ga:pagePath=~/handle/" + handle + "$") - .execute(); - } - - public GaData getBitstreamDownloads(String startDate, String endDate, String handle) throws IOException { - return GoogleAccount.getInstance().getClient().data().ga().get( - GoogleAccount.getInstance().getTableId(), - startDate, - endDate, - "ga:totalEvents") // Metrics. - .setDimensions("ga:year,ga:month") - .setSort("-ga:year,-ga:month") - .setFilters( - "ga:eventCategory==bitstream;ga:eventAction==download;ga:pagePath=~" + handle + "/") - .execute(); - } - -} - diff --git a/dspace-api/src/main/java/org/dspace/google/GoogleRecorderEventListener.java b/dspace-api/src/main/java/org/dspace/google/GoogleRecorderEventListener.java deleted file mode 100644 index fb4e9c04de5b..000000000000 --- a/dspace-api/src/main/java/org/dspace/google/GoogleRecorderEventListener.java +++ /dev/null @@ -1,201 +0,0 @@ -/** - * The contents of this file are subject to the license and copyright - * detailed in the LICENSE and NOTICE files at the root of the source - * tree and available online at - * - * http://www.dspace.org/license/ - */ - -package org.dspace.google; - -import java.io.IOException; -import java.sql.SQLException; -import java.util.ArrayList; -import java.util.List; -import java.util.UUID; - -import jakarta.servlet.http.HttpServletRequest; -import org.apache.commons.lang3.StringUtils; -import org.apache.http.NameValuePair; -import org.apache.http.client.entity.UrlEncodedFormEntity; -import org.apache.http.client.methods.CloseableHttpResponse; -import org.apache.http.client.methods.HttpPost; -import org.apache.http.impl.client.CloseableHttpClient; -import org.apache.http.impl.client.HttpClients; -import org.apache.http.message.BasicNameValuePair; -import org.apache.logging.log4j.Logger; -import org.dspace.content.factory.ContentServiceFactory; -import org.dspace.core.Constants; -import org.dspace.service.ClientInfoService; -import org.dspace.services.ConfigurationService; -import org.dspace.services.model.Event; -import org.dspace.usage.AbstractUsageEventListener; -import org.dspace.usage.UsageEvent; -import org.springframework.beans.factory.annotation.Autowired; - - -/** - * User: Robin Taylor - * Date: 14/08/2014 - * Time: 10:05 - * - * Notify Google Analytics of... well anything we want really. 
- * @deprecated Use org.dspace.google.GoogleAsyncEventListener instead - */ -@Deprecated -public class GoogleRecorderEventListener extends AbstractUsageEventListener { - - private String analyticsKey; - private CloseableHttpClient httpclient; - private String GoogleURL = "https://www.google-analytics.com/collect"; - private static Logger log = org.apache.logging.log4j.LogManager.getLogger(GoogleRecorderEventListener.class); - - protected ContentServiceFactory contentServiceFactory; - protected ConfigurationService configurationService; - protected ClientInfoService clientInfoService; - - public GoogleRecorderEventListener() { - // httpclient is threadsafe so we only need one. - httpclient = HttpClients.createDefault(); - } - - @Autowired - public void setContentServiceFactory(ContentServiceFactory contentServiceFactory) { - this.contentServiceFactory = contentServiceFactory; - } - - @Autowired - public void setConfigurationService(ConfigurationService configurationService) { - this.configurationService = configurationService; - } - - @Autowired - public void setClientInfoService(ClientInfoService clientInfoService) { - this.clientInfoService = clientInfoService; - } - - @Override - public void receiveEvent(Event event) { - if ((event instanceof UsageEvent)) { - log.debug("Usage event received " + event.getName()); - - // This is a wee bit messy but these keys should be combined in future. - analyticsKey = configurationService.getProperty("google.analytics.key"); - - if (StringUtils.isNotBlank(analyticsKey)) { - try { - UsageEvent ue = (UsageEvent) event; - - if (ue.getAction() == UsageEvent.Action.VIEW) { - if (ue.getObject().getType() == Constants.BITSTREAM) { - logEvent(ue, "bitstream", "download"); - - // Note: I've left this commented out code here to show how we could record page views - // as events, - // but since they are already taken care of by the Google Analytics Javascript there is - // not much point. - - //} else if (ue.getObject().getType() == Constants.ITEM) { - // logEvent(ue, "item", "view"); - //} else if (ue.getObject().getType() == Constants.COLLECTION) { - // logEvent(ue, "collection", "view"); - //} else if (ue.getObject().getType() == Constants.COMMUNITY) { - // logEvent(ue, "community", "view"); - } - } - } catch (Exception e) { - log.error(e.getMessage()); - } - } - } - } - - private void logEvent(UsageEvent ue, String category, String action) throws IOException, SQLException { - HttpPost httpPost = new HttpPost(GoogleURL); - - List nvps = new ArrayList(); - nvps.add(new BasicNameValuePair("v", "1")); - nvps.add(new BasicNameValuePair("tid", analyticsKey)); - - // Client Id, should uniquely identify the user or device. If we have a session id for the user - // then lets use it, else generate a UUID. 
- if (ue.getRequest().getSession(false) != null) { - nvps.add(new BasicNameValuePair("cid", ue.getRequest().getSession().getId())); - } else { - nvps.add(new BasicNameValuePair("cid", UUID.randomUUID().toString())); - } - - nvps.add(new BasicNameValuePair("t", "event")); - nvps.add(new BasicNameValuePair("uip", getIPAddress(ue.getRequest()))); - nvps.add(new BasicNameValuePair("ua", ue.getRequest().getHeader("USER-AGENT"))); - nvps.add(new BasicNameValuePair("dr", ue.getRequest().getHeader("referer"))); - nvps.add(new BasicNameValuePair("dp", ue.getRequest().getRequestURI())); - nvps.add(new BasicNameValuePair("dt", getObjectName(ue))); - nvps.add(new BasicNameValuePair("ec", category)); - nvps.add(new BasicNameValuePair("ea", action)); - - if (ue.getObject().getType() == Constants.BITSTREAM) { - // Bitstream downloads may occasionally be for collection or community images, so we need to label them - // with the parent object type. - nvps.add(new BasicNameValuePair("el", getParentType(ue))); - } - - httpPost.setEntity(new UrlEncodedFormEntity(nvps)); - - try (CloseableHttpResponse response2 = httpclient.execute(httpPost)) { - // I can't find a list of what are acceptable responses, so I log the response but take no action. - log.debug("Google Analytics response is " + response2.getStatusLine()); - } - - log.debug("Posted to Google Analytics - " + ue.getRequest().getRequestURI()); - } - - private String getParentType(UsageEvent ue) { - try { - int parentType = contentServiceFactory.getDSpaceObjectService(ue.getObject()) - .getParentObject(ue.getContext(), ue.getObject()).getType(); - if (parentType == Constants.ITEM) { - return "item"; - } else if (parentType == Constants.COLLECTION) { - return "collection"; - } else if (parentType == Constants.COMMUNITY) { - return "community"; - } - } catch (SQLException e) { - // This shouldn't merit interrupting the user's transaction so log the error and continue. - log.error( - "Error in Google Analytics recording - can't determine ParentObjectType for bitstream " + ue.getObject() - .getID()); - e.printStackTrace(); - } - - return null; - } - - private String getObjectName(UsageEvent ue) { - try { - if (ue.getObject().getType() == Constants.BITSTREAM) { - // For a bitstream download we really want to know the title of the owning item rather than the - // bitstream name. - return contentServiceFactory.getDSpaceObjectService(ue.getObject()) - .getParentObject(ue.getContext(), ue.getObject()).getName(); - } else { - return ue.getObject().getName(); - } - } catch (SQLException e) { - // This shouldn't merit interrupting the user's transaction so log the error and continue. - log.error( - "Error in Google Analytics recording - can't determine ParentObjectName for bitstream " + ue.getObject() - .getID()); - e.printStackTrace(); - } - - return null; - - } - - private String getIPAddress(HttpServletRequest request) { - return clientInfoService.getClientIp(request); - } - -} diff --git a/dspace-api/src/main/java/org/dspace/identifier/doi/DOIOrganiser.java b/dspace-api/src/main/java/org/dspace/identifier/doi/DOIOrganiser.java index b03af68b42ab..0ee64f94b799 100644 --- a/dspace-api/src/main/java/org/dspace/identifier/doi/DOIOrganiser.java +++ b/dspace-api/src/main/java/org/dspace/identifier/doi/DOIOrganiser.java @@ -577,7 +577,8 @@ public void update(DOI doiRow) { } } catch (IdentifierException ex) { if (!(ex instanceof DOIIdentifierException)) { - LOG.error("It wasn't possible to register the identifier online. 
", ex); + LOG.error("Registering DOI {} for object {}: the registrar returned an error.", + doiRow.getDoi(), dso.getID(), ex); } DOIIdentifierException doiIdentifierException = (DOIIdentifierException) ex; diff --git a/dspace-api/src/main/java/org/dspace/identifier/doi/DataCiteConnector.java b/dspace-api/src/main/java/org/dspace/identifier/doi/DataCiteConnector.java index 23af904f2c71..e56a82942369 100644 --- a/dspace-api/src/main/java/org/dspace/identifier/doi/DataCiteConnector.java +++ b/dspace-api/src/main/java/org/dspace/identifier/doi/DataCiteConnector.java @@ -461,6 +461,10 @@ public void reserveDOI(Context context, DSpaceObject dso, String doi) log.warn("While reserving the DOI {}, we got a http status code " + "{} and the message \"{}\".", doi, Integer.toString(resp.statusCode), resp.getContent()); + Format format = Format.getCompactFormat(); + format.setEncoding("UTF-8"); + XMLOutputter xout = new XMLOutputter(format); + log.info("We send the following XML:\n{}", xout.outputString(root)); throw new DOIIdentifierException("Unable to parse an answer from " + "DataCite API. Please have a look into DSpace logs.", DOIIdentifierException.BAD_ANSWER); @@ -632,6 +636,14 @@ protected DataCiteResponse sendGetRequest(String doi, String path) return sendHttpRequest(httpget, doi); } + /** + * Send a DataCite metadata document to the registrar. + * + * @param doi identify the object. + * @param metadataRoot describe the object. The root element of the document. + * @return the registrar's response. + * @throws DOIIdentifierException passed through. + */ protected DataCiteResponse sendMetadataPostRequest(String doi, Element metadataRoot) throws DOIIdentifierException { Format format = Format.getCompactFormat(); @@ -640,6 +652,14 @@ protected DataCiteResponse sendMetadataPostRequest(String doi, Element metadataR return sendMetadataPostRequest(doi, xout.outputString(new Document(metadataRoot))); } + /** + * Send a DataCite metadata document to the registrar. + * + * @param doi identify the object. + * @param metadata describe the object. + * @return the registrar's response. + * @throws DOIIdentifierException passed through. + */ protected DataCiteResponse sendMetadataPostRequest(String doi, String metadata) throws DOIIdentifierException { // post mds/metadata/ @@ -687,7 +707,7 @@ protected DataCiteResponse sendMetadataPostRequest(String doi, String metadata) * properties such as request URI and method type. * @param doi DOI string to operate on * @return response from DataCite - * @throws DOIIdentifierException if DOI error + * @throws DOIIdentifierException if registrar returns an error. 
*/ protected DataCiteResponse sendHttpRequest(HttpUriRequest req, String doi) throws DOIIdentifierException { diff --git a/dspace-api/src/main/java/org/dspace/scripts/ProcessServiceImpl.java b/dspace-api/src/main/java/org/dspace/scripts/ProcessServiceImpl.java index f242ec7acdad..ab5147221cfc 100644 --- a/dspace-api/src/main/java/org/dspace/scripts/ProcessServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/scripts/ProcessServiceImpl.java @@ -45,14 +45,15 @@ import org.dspace.core.LogHelper; import org.dspace.eperson.EPerson; import org.dspace.eperson.Group; -import org.dspace.eperson.service.EPersonService; import org.dspace.scripts.service.ProcessService; +import org.dspace.services.ConfigurationService; +import org.springframework.beans.factory.InitializingBean; import org.springframework.beans.factory.annotation.Autowired; /** * The implementation for the {@link ProcessService} class */ -public class ProcessServiceImpl implements ProcessService { +public class ProcessServiceImpl implements ProcessService, InitializingBean { private static final Logger log = org.apache.logging.log4j.LogManager.getLogger(ProcessService.class); @@ -72,7 +73,34 @@ public class ProcessServiceImpl implements ProcessService { private MetadataFieldService metadataFieldService; @Autowired - private EPersonService ePersonService; + private ConfigurationService configurationService; + + @Override + public void afterPropertiesSet() throws Exception { + try { + Context context = new Context(); + + // Processes that were running or scheduled when tomcat crashed, should be cleaned up during startup. + List processesToBeFailed = findByStatusAndCreationTimeOlderThan( + context, List.of(ProcessStatus.RUNNING, ProcessStatus.SCHEDULED), new Date()); + for (Process process : processesToBeFailed) { + context.setCurrentUser(process.getEPerson()); + // Fail the process. + log.info("Process with ID {} did not complete before tomcat shutdown, failing it now.", + process.getID()); + fail(context, process); + // But still attach its log to the process. 
+ appendLog(process.getID(), process.getName(), + "Process did not complete before tomcat shutdown.", + ProcessLogLevel.ERROR); + createLogBitstream(context, process); + } + + context.complete(); + } catch (Exception e) { + log.error("Unable to clean up Processes: ", e); + } + } @Override public Process create(Context context, EPerson ePerson, String scriptName, @@ -293,8 +321,8 @@ public int countSearch(Context context, ProcessQueryParameterContainer processQu @Override public void appendLog(int processId, String scriptName, String output, ProcessLogLevel processLogLevel) throws IOException { - File tmpDir = FileUtils.getTempDirectory(); - File tempFile = new File(tmpDir, scriptName + processId + ".log"); + File logsDir = getLogsDirectory(); + File tempFile = new File(logsDir, processId + "-" + scriptName + ".log"); FileWriter out = new FileWriter(tempFile, true); try { try (BufferedWriter writer = new BufferedWriter(out)) { @@ -309,12 +337,15 @@ public void appendLog(int processId, String scriptName, String output, ProcessLo @Override public void createLogBitstream(Context context, Process process) throws IOException, SQLException, AuthorizeException { - File tmpDir = FileUtils.getTempDirectory(); - File tempFile = new File(tmpDir, process.getName() + process.getID() + ".log"); - FileInputStream inputStream = FileUtils.openInputStream(tempFile); - appendFile(context, process, inputStream, Process.OUTPUT_TYPE, process.getName() + process.getID() + ".log"); - inputStream.close(); - tempFile.delete(); + File logsDir = getLogsDirectory(); + File tempFile = new File(logsDir, process.getID() + "-" + process.getName() + ".log"); + if (tempFile.exists()) { + FileInputStream inputStream = FileUtils.openInputStream(tempFile); + appendFile(context, process, inputStream, Process.OUTPUT_TYPE, + process.getID() + "-" + process.getName() + ".log"); + inputStream.close(); + tempFile.delete(); + } } @Override @@ -343,4 +374,15 @@ private String formatLogLine(int processId, String scriptName, String output, Pr return sb.toString(); } + private File getLogsDirectory() { + String pathStr = configurationService.getProperty("dspace.dir") + + File.separator + "log" + File.separator + "processes"; + File logsDir = new File(pathStr); + if (!logsDir.exists()) { + if (!logsDir.mkdirs()) { + throw new RuntimeException("Couldn't create [dspace.dir]/log/processes/ directory."); + } + } + return logsDir; + } } diff --git a/dspace-api/src/main/java/org/dspace/usage/package-info.java b/dspace-api/src/main/java/org/dspace/usage/package-info.java index 5883bcf358f4..26984ae0caa0 100644 --- a/dspace-api/src/main/java/org/dspace/usage/package-info.java +++ b/dspace-api/src/main/java/org/dspace/usage/package-info.java @@ -25,7 +25,7 @@ * {@code EventService}, as with the stock listeners. *
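Process logs now accumulate under [dspace.dir]/log/processes instead of the JVM temp directory, and the file name changes from <name><id>.log to <id>-<name>.log. A small sketch of the resulting path, assuming a hypothetical dspace.dir of /dspace and a process 123 running metadata-export:

    // Illustrative path math only; "/dspace", 123 and "metadata-export" are assumed example values.
    import java.io.File;

    public class ProcessLogPathSketch {
        public static void main(String[] args) {
            String dspaceDir = "/dspace";
            File logsDir = new File(dspaceDir + File.separator + "log" + File.separator + "processes");
            // appendLog() writes here while the script runs ...
            File logFile = new File(logsDir, 123 + "-" + "metadata-export" + ".log");
            // ... and createLogBitstream() later attaches the same file to the Process, if it exists.
            System.out.println(logFile.getPath()); // e.g. /dspace/log/processes/123-metadata-export.log
        }
    }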
* - * @see org.dspace.google.GoogleRecorderEventListener + * @see org.dspace.google.GoogleAsyncEventListener * @see org.dspace.statistics.SolrLoggerUsageEventListener */ diff --git a/dspace-api/src/main/java/org/dspace/xmlworkflow/state/actions/processingaction/ScoreReviewAction.java b/dspace-api/src/main/java/org/dspace/xmlworkflow/state/actions/processingaction/ScoreReviewAction.java index a8ed4fd3dae9..50f338499282 100644 --- a/dspace-api/src/main/java/org/dspace/xmlworkflow/state/actions/processingaction/ScoreReviewAction.java +++ b/dspace-api/src/main/java/org/dspace/xmlworkflow/state/actions/processingaction/ScoreReviewAction.java @@ -8,6 +8,7 @@ package org.dspace.xmlworkflow.state.actions.processingaction; import java.sql.SQLException; +import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.List; @@ -20,6 +21,8 @@ import org.dspace.authorize.AuthorizeException; import org.dspace.content.MetadataFieldName; import org.dspace.core.Context; +import org.dspace.services.ConfigurationService; +import org.dspace.services.factory.DSpaceServicesFactory; import org.dspace.xmlworkflow.service.WorkflowRequirementsService; import org.dspace.xmlworkflow.state.Step; import org.dspace.xmlworkflow.state.actions.ActionAdvancedInfo; @@ -34,6 +37,9 @@ public class ScoreReviewAction extends ProcessingAction { private static final Logger log = LogManager.getLogger(ScoreReviewAction.class); + private final ConfigurationService configurationService + = DSpaceServicesFactory.getInstance().getConfigurationService(); + // Option(s) public static final String SUBMIT_SCORE = "submit_score"; @@ -114,7 +120,14 @@ private boolean checkRequestValid(int score, String review) { @Override public List getOptions() { - return List.of(SUBMIT_SCORE, RETURN_TO_POOL); + List options = new ArrayList<>(); + options.add(SUBMIT_SCORE); + if (configurationService.getBooleanProperty("workflow.reviewer.file-edit", false)) { + options.add(SUBMIT_EDIT_METADATA); + } + options.add(RETURN_TO_POOL); + + return options; } @Override diff --git a/dspace-api/src/main/java/org/dspace/xmlworkflow/state/actions/processingaction/SingleUserReviewAction.java b/dspace-api/src/main/java/org/dspace/xmlworkflow/state/actions/processingaction/SingleUserReviewAction.java index 64e0957b65b7..c46fa851e4f1 100644 --- a/dspace-api/src/main/java/org/dspace/xmlworkflow/state/actions/processingaction/SingleUserReviewAction.java +++ b/dspace-api/src/main/java/org/dspace/xmlworkflow/state/actions/processingaction/SingleUserReviewAction.java @@ -21,6 +21,8 @@ import org.dspace.content.factory.ContentServiceFactory; import org.dspace.core.Context; import org.dspace.eperson.EPerson; +import org.dspace.services.ConfigurationService; +import org.dspace.services.factory.DSpaceServicesFactory; import org.dspace.workflow.WorkflowException; import org.dspace.xmlworkflow.factory.XmlWorkflowServiceFactory; import org.dspace.xmlworkflow.state.Step; @@ -40,6 +42,9 @@ public class SingleUserReviewAction extends ProcessingAction { private static final Logger log = LogManager.getLogger(SingleUserReviewAction.class); + private final ConfigurationService configurationService + = DSpaceServicesFactory.getInstance().getConfigurationService(); + public static final int OUTCOME_REJECT = 1; protected static final String SUBMIT_DECLINE_TASK = "submit_decline_task"; @@ -95,6 +100,9 @@ public ActionResult processAccept(Context c, XmlWorkflowItem wfi) throws SQLExce public List getOptions() { List options = new ArrayList<>(); 
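Both review actions in this change gate the extra metadata-edit option on the same workflow.reviewer.file-edit property. A generic sketch of that pattern follows; the literal option strings are placeholders, not the actions' real constants.

    // Sketch of the config-gated option list; the option names below are assumptions.
    import java.util.ArrayList;
    import java.util.List;
    import org.dspace.services.ConfigurationService;
    import org.dspace.services.factory.DSpaceServicesFactory;

    public class ReviewerOptionsSketch {
        public static List<String> getOptions() {
            ConfigurationService config = DSpaceServicesFactory.getInstance().getConfigurationService();
            List<String> options = new ArrayList<>();
            options.add("submit_approve");
            // Reviewers only see the metadata-edit action when the site has opted in
            if (config.getBooleanProperty("workflow.reviewer.file-edit", false)) {
                options.add("submit_edit_metadata");
            }
            options.add("submit_reject");
            return options;
        }
    }

The default of false keeps existing workflows unchanged unless the property is explicitly enabled.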
options.add(SUBMIT_APPROVE); + if (configurationService.getBooleanProperty("workflow.reviewer.file-edit", false)) { + options.add(SUBMIT_EDIT_METADATA); + } options.add(SUBMIT_REJECT); options.add(SUBMIT_DECLINE_TASK); return options; diff --git a/dspace-api/src/test/data/dspaceFolder/config/controlled-vocabularies/countries.xml b/dspace-api/src/test/data/dspaceFolder/config/controlled-vocabularies/countries.xml new file mode 100644 index 000000000000..078e8bfa3865 --- /dev/null +++ b/dspace-api/src/test/data/dspaceFolder/config/controlled-vocabularies/countries.xml @@ -0,0 +1,10 @@ + + + + + + + + + + diff --git a/dspace-api/src/test/data/dspaceFolder/config/controlled-vocabularies/countries_de.xml b/dspace-api/src/test/data/dspaceFolder/config/controlled-vocabularies/countries_de.xml new file mode 100644 index 000000000000..b4bbf0b1e03c --- /dev/null +++ b/dspace-api/src/test/data/dspaceFolder/config/controlled-vocabularies/countries_de.xml @@ -0,0 +1,10 @@ + + + + + + + + + + diff --git a/dspace-api/src/test/data/dspaceFolder/config/local.cfg b/dspace-api/src/test/data/dspaceFolder/config/local.cfg index b44f319a35f6..a85c1114f876 100644 --- a/dspace-api/src/test/data/dspaceFolder/config/local.cfg +++ b/dspace-api/src/test/data/dspaceFolder/config/local.cfg @@ -175,6 +175,9 @@ authority.controlled.dspace.object.owner = true webui.browse.link.1 = author:dc.contributor.* webui.browse.link.2 = subject:dc.subject.* +# Configuration required for testing the controlled vocabulary functionality, which is configured using properties +vocabulary.plugin.countries.hierarchy.store=false +vocabulary.plugin.countries.storeIDs=true # Enable duplicate detection for tests duplicate.enable = true diff --git a/dspace-api/src/test/java/org/dspace/app/bulkedit/MetadataExportSearchIT.java b/dspace-api/src/test/java/org/dspace/app/bulkedit/MetadataExportSearchIT.java index 3a972692efeb..63a87a48f554 100644 --- a/dspace-api/src/test/java/org/dspace/app/bulkedit/MetadataExportSearchIT.java +++ b/dspace-api/src/test/java/org/dspace/app/bulkedit/MetadataExportSearchIT.java @@ -23,7 +23,8 @@ import com.google.common.io.Files; import com.opencsv.CSVReader; import com.opencsv.exceptions.CsvException; -import org.apache.log4j.Logger; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; import org.dspace.AbstractIntegrationTestWithDatabase; import org.dspace.app.launcher.ScriptLauncher; import org.dspace.app.scripts.handler.impl.TestDSpaceRunnableHandler; @@ -51,7 +52,7 @@ public class MetadataExportSearchIT extends AbstractIntegrationTestWithDatabase private Item[] itemsSubject2 = new Item[numberItemsSubject2]; private String filename; private Collection collection; - private Logger logger = Logger.getLogger(MetadataExportSearchIT.class); + private Logger logger = LogManager.getLogger(MetadataExportSearchIT.class); private ConfigurationService configurationService = DSpaceServicesFactory.getInstance().getConfigurationService(); private SearchService searchService; diff --git a/dspace-api/src/test/java/org/dspace/builder/OrcidHistoryBuilder.java b/dspace-api/src/test/java/org/dspace/builder/OrcidHistoryBuilder.java index 199f412f8506..d811d03f5358 100644 --- a/dspace-api/src/test/java/org/dspace/builder/OrcidHistoryBuilder.java +++ b/dspace-api/src/test/java/org/dspace/builder/OrcidHistoryBuilder.java @@ -11,7 +11,8 @@ import java.sql.SQLException; import java.util.Date; -import org.apache.log4j.Logger; +import org.apache.logging.log4j.LogManager; +import 
org.apache.logging.log4j.Logger; import org.dspace.content.Item; import org.dspace.core.Context; import org.dspace.orcid.OrcidHistory; @@ -24,7 +25,7 @@ */ public class OrcidHistoryBuilder extends AbstractBuilder { - private static final Logger log = Logger.getLogger(OrcidHistoryBuilder.class); + private static final Logger log = LogManager.getLogger(OrcidHistoryBuilder.class); private OrcidHistory orcidHistory; diff --git a/dspace-api/src/test/java/org/dspace/content/authority/DSpaceControlledVocabularyTest.java b/dspace-api/src/test/java/org/dspace/content/authority/DSpaceControlledVocabularyTest.java index 255b070e5eac..43bd20cc1553 100644 --- a/dspace-api/src/test/java/org/dspace/content/authority/DSpaceControlledVocabularyTest.java +++ b/dspace-api/src/test/java/org/dspace/content/authority/DSpaceControlledVocabularyTest.java @@ -89,6 +89,145 @@ public void testGetMatches() throws IOException, ClassNotFoundException { assertEquals("north 40", result.values[0].value); } + /** + * Test of getMatches method of class + * DSpaceControlledVocabulary using a localized controlled vocabulary with no locale (fallback to default) + * @throws java.lang.ClassNotFoundException passed through. + */ + @Test + public void testGetMatchesNoLocale() throws ClassNotFoundException { + final String PLUGIN_INTERFACE = "org.dspace.content.authority.ChoiceAuthority"; + + String idValue = "DZA"; + String labelPart = "Alge"; + int start = 0; + int limit = 10; + // This "countries" Controlled Vocab is included in TestEnvironment data + // (under /src/test/data/dspaceFolder/) and it should be auto-loaded + // by test configs in /src/test/data/dspaceFolder/config/local.cfg + DSpaceControlledVocabulary instance = (DSpaceControlledVocabulary) + CoreServiceFactory.getInstance().getPluginService().getNamedPlugin(Class.forName(PLUGIN_INTERFACE), + "countries"); + assertNotNull(instance); + Choices result = instance.getMatches(labelPart, start, limit, null); + assertEquals(idValue, result.values[0].value); + assertEquals("Algeria", result.values[0].label); + } + + /** + * Test of getBestMatch method of class + * DSpaceControlledVocabulary using a localized controlled vocabulary with no locale (fallback to default) + * @throws java.lang.ClassNotFoundException passed through. 
+ */ + @Test + public void testGetBestMatchIdValueNoLocale() throws ClassNotFoundException { + final String PLUGIN_INTERFACE = "org.dspace.content.authority.ChoiceAuthority"; + + String idValue = "DZA"; + // This "countries" Controlled Vocab is included in TestEnvironment data + // (under /src/test/data/dspaceFolder/) and it should be auto-loaded + // by test configs in /src/test/data/dspaceFolder/config/local.cfg + DSpaceControlledVocabulary instance = (DSpaceControlledVocabulary) + CoreServiceFactory.getInstance().getPluginService().getNamedPlugin(Class.forName(PLUGIN_INTERFACE), + "countries"); + assertNotNull(instance); + Choices result = instance.getBestMatch(idValue, null); + assertEquals(idValue, result.values[0].value); + assertEquals("Algeria", result.values[0].label); + } + + /** + * Test of getMatches method of class + * DSpaceControlledVocabulary using a localized controlled vocabulary with valid locale parameter (localized + * label returned) + */ + @Test + public void testGetMatchesGermanLocale() throws ClassNotFoundException { + final String PLUGIN_INTERFACE = "org.dspace.content.authority.ChoiceAuthority"; + + String idValue = "DZA"; + String labelPart = "Alge"; + int start = 0; + int limit = 10; + // This "countries" Controlled Vocab is included in TestEnvironment data + // (under /src/test/data/dspaceFolder/) and it should be auto-loaded + // by test configs in /src/test/data/dspaceFolder/config/local.cfg + DSpaceControlledVocabulary instance = (DSpaceControlledVocabulary) + CoreServiceFactory.getInstance().getPluginService().getNamedPlugin(Class.forName(PLUGIN_INTERFACE), + "countries"); + assertNotNull(instance); + Choices result = instance.getMatches(labelPart, start, limit, "de"); + assertEquals(idValue, result.values[0].value); + assertEquals("Algerien", result.values[0].label); + } + + /** + * Test of getBestMatch method of class + * DSpaceControlledVocabulary using a localized controlled vocabulary with valid locale parameter (localized + * label returned) + */ + @Test + public void testGetBestMatchIdValueGermanLocale() throws ClassNotFoundException { + final String PLUGIN_INTERFACE = "org.dspace.content.authority.ChoiceAuthority"; + + String idValue = "DZA"; + // This "countries" Controlled Vocab is included in TestEnvironment data + // (under /src/test/data/dspaceFolder/) and it should be auto-loaded + // by test configs in /src/test/data/dspaceFolder/config/local.cfg + DSpaceControlledVocabulary instance = (DSpaceControlledVocabulary) + CoreServiceFactory.getInstance().getPluginService().getNamedPlugin(Class.forName(PLUGIN_INTERFACE), + "countries"); + assertNotNull(instance); + Choices result = instance.getBestMatch(idValue, "de"); + assertEquals(idValue, result.values[0].value); + assertEquals("Algerien", result.values[0].label); + } + + /** + * Test of getChoice method of class + * DSpaceControlledVocabulary using a localized controlled vocabulary with no locale (fallback to default) + * @throws java.lang.ClassNotFoundException passed through. 
+ */ + @Test + public void testGetChoiceNoLocale() throws ClassNotFoundException { + final String PLUGIN_INTERFACE = "org.dspace.content.authority.ChoiceAuthority"; + + String idValue = "DZA"; + // This "countries" Controlled Vocab is included in TestEnvironment data + // (under /src/test/data/dspaceFolder/) and it should be auto-loaded + // by test configs in /src/test/data/dspaceFolder/config/local.cfg + DSpaceControlledVocabulary instance = (DSpaceControlledVocabulary) + CoreServiceFactory.getInstance().getPluginService().getNamedPlugin(Class.forName(PLUGIN_INTERFACE), + "countries"); + assertNotNull(instance); + Choice result = instance.getChoice(idValue, null); + assertEquals(idValue, result.value); + assertEquals("Algeria", result.label); + } + + /** + * Test of getChoice method of class + * DSpaceControlledVocabulary using a localized controlled vocabulary with valid locale parameter (localized + * label returned) + * @throws java.lang.ClassNotFoundException passed through. + */ + @Test + public void testGetChoiceGermanLocale() throws ClassNotFoundException { + final String PLUGIN_INTERFACE = "org.dspace.content.authority.ChoiceAuthority"; + + String idValue = "DZA"; + // This "countries" Controlled Vocab is included in TestEnvironment data + // (under /src/test/data/dspaceFolder/) and it should be auto-loaded + // by test configs in /src/test/data/dspaceFolder/config/local.cfg + DSpaceControlledVocabulary instance = (DSpaceControlledVocabulary) + CoreServiceFactory.getInstance().getPluginService().getNamedPlugin(Class.forName(PLUGIN_INTERFACE), + "countries"); + assertNotNull(instance); + Choice result = instance.getChoice(idValue, "de"); + assertEquals(idValue, result.value); + assertEquals("Algerien", result.label); + } + /** * Test of getBestMatch method, of class DSpaceControlledVocabulary. 
*/ diff --git a/dspace-iiif/pom.xml b/dspace-iiif/pom.xml index 98683cdc9f76..9c4c174bf39f 100644 --- a/dspace-iiif/pom.xml +++ b/dspace-iiif/pom.xml @@ -44,6 +44,11 @@ org.springframework.boot spring-boot-starter-logging + + + org.springframework + spring-jcl + @@ -106,7 +111,7 @@ de.digitalcollections.iiif iiif-apis - 0.3.10 + 0.3.11 org.javassist diff --git a/dspace-oai/pom.xml b/dspace-oai/pom.xml index dbfaee3f692a..9ba9ff3e2f71 100644 --- a/dspace-oai/pom.xml +++ b/dspace-oai/pom.xml @@ -65,9 +65,8 @@ - javax.inject - javax.inject - 1 + jakarta.inject + jakarta.inject-api @@ -80,6 +79,11 @@ org.springframework.boot spring-boot-starter-logging + + + org.springframework + spring-jcl + @@ -94,22 +98,9 @@ org.springframework.boot spring-boot-starter-web - - - org.parboiled - parboiled-java - - - - org.parboiled - parboiled-java - 1.3.1 - - org.dspace @@ -128,23 +119,6 @@ org.apache.logging.log4j log4j-api - - org.apache.logging.log4j - log4j-core - - - org.apache.logging.log4j - log4j-web - - - org.apache.logging.log4j - log4j-slf4j-impl - runtime - - - org.apache.logging.log4j - log4j-1.2-api - diff --git a/dspace-oai/src/main/java/org/dspace/xoai/app/CCElementItemCompilePlugin.java b/dspace-oai/src/main/java/org/dspace/xoai/app/CCElementItemCompilePlugin.java index 225d56a4c982..370543029d8b 100644 --- a/dspace-oai/src/main/java/org/dspace/xoai/app/CCElementItemCompilePlugin.java +++ b/dspace-oai/src/main/java/org/dspace/xoai/app/CCElementItemCompilePlugin.java @@ -11,7 +11,7 @@ import com.lyncode.xoai.dataprovider.xml.xoai.Element; import com.lyncode.xoai.dataprovider.xml.xoai.Metadata; -import org.apache.commons.lang.StringUtils; +import org.apache.commons.lang3.StringUtils; import org.dspace.content.Item; import org.dspace.core.Context; import org.dspace.license.factory.LicenseServiceFactory; diff --git a/dspace-oai/src/main/java/org/dspace/xoai/app/XOAI.java b/dspace-oai/src/main/java/org/dspace/xoai/app/XOAI.java index 25cc1ee3655f..ad138ca9f2ad 100644 --- a/dspace-oai/src/main/java/org/dspace/xoai/app/XOAI.java +++ b/dspace-oai/src/main/java/org/dspace/xoai/app/XOAI.java @@ -9,7 +9,7 @@ import static com.lyncode.xoai.dataprovider.core.Granularity.Second; import static java.util.Objects.nonNull; -import static org.apache.commons.lang.StringUtils.EMPTY; +import static org.apache.commons.lang3.StringUtils.EMPTY; import static org.apache.solr.common.params.CursorMarkParams.CURSOR_MARK_PARAM; import static org.apache.solr.common.params.CursorMarkParams.CURSOR_MARK_START; import static org.dspace.xoai.util.ItemUtils.retrieveMetadata; diff --git a/dspace-oai/src/main/java/org/dspace/xoai/filter/ItemsWithBitstreamFilter.java b/dspace-oai/src/main/java/org/dspace/xoai/filter/ItemsWithBitstreamFilter.java index 3599c5b9e168..9bf1c65dc9d9 100644 --- a/dspace-oai/src/main/java/org/dspace/xoai/filter/ItemsWithBitstreamFilter.java +++ b/dspace-oai/src/main/java/org/dspace/xoai/filter/ItemsWithBitstreamFilter.java @@ -9,8 +9,8 @@ import java.sql.SQLException; -import org.apache.log4j.LogManager; -import org.apache.log4j.Logger; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; import org.dspace.content.Bundle; import org.dspace.content.Item; import org.dspace.handle.factory.HandleServiceFactory; diff --git a/dspace-oai/src/test/java/org/dspace/xoai/tests/integration/xoai/PipelineTest.java b/dspace-oai/src/test/java/org/dspace/xoai/tests/integration/xoai/PipelineTest.java index 0f48824159c2..0f7ffde0bd00 100644 --- 
a/dspace-oai/src/test/java/org/dspace/xoai/tests/integration/xoai/PipelineTest.java +++ b/dspace-oai/src/test/java/org/dspace/xoai/tests/integration/xoai/PipelineTest.java @@ -13,13 +13,14 @@ import static org.hamcrest.MatcherAssert.assertThat; import java.io.InputStream; +import java.nio.charset.Charset; import javax.xml.transform.TransformerFactory; import javax.xml.transform.stream.StreamSource; import com.lyncode.xoai.util.XSLPipeline; +import org.apache.commons.io.IOUtils; import org.dspace.xoai.tests.support.XmlMatcherBuilder; import org.junit.Test; -import org.parboiled.common.FileUtils; public class PipelineTest { private static TransformerFactory factory = TransformerFactory.newInstance(); @@ -28,9 +29,9 @@ public class PipelineTest { public void pipelineTest() throws Exception { InputStream input = PipelineTest.class.getClassLoader().getResourceAsStream("item.xml"); InputStream xslt = PipelineTest.class.getClassLoader().getResourceAsStream("oai_dc.xsl"); - String output = FileUtils.readAllText(new XSLPipeline(input, true) - .apply(factory.newTemplates(new StreamSource(xslt))) - .getTransformed()); + String output = IOUtils.toString(new XSLPipeline(input, true) + .apply(factory.newTemplates(new StreamSource(xslt))) + .getTransformed(), Charset.defaultCharset()); assertThat(output, oai_dc().withXPath("/oai_dc:dc/dc:title", equalTo("Teste"))); diff --git a/dspace-rdf/pom.xml b/dspace-rdf/pom.xml index 45ab69b4834e..7f214dc088d4 100644 --- a/dspace-rdf/pom.xml +++ b/dspace-rdf/pom.xml @@ -67,6 +67,11 @@ org.springframework.boot spring-boot-starter-logging + + + org.springframework + spring-jcl + @@ -80,14 +85,6 @@ org.apache.logging.log4j log4j-api - - org.apache.logging.log4j - log4j-core - - - org.apache.logging.log4j - log4j-web - org.apache.commons diff --git a/dspace-server-webapp/pom.xml b/dspace-server-webapp/pom.xml index b437b07869af..b115cff73417 100644 --- a/dspace-server-webapp/pom.xml +++ b/dspace-server-webapp/pom.xml @@ -293,14 +293,6 @@ spring-expression ${spring.version} - - - @@ -468,6 +460,11 @@ org.springframework.boot spring-boot-starter-logging + + + org.springframework + spring-jcl + @@ -555,7 +552,7 @@ net.minidev json-smart - 2.5.0 + 2.5.1 @@ -593,7 +590,7 @@ org.apache.httpcomponents.client5 httpclient5 - 5.3.1 + 5.4.1 test @@ -605,6 +602,13 @@ com.jayway.jsonpath json-path + + + + net.minidev + json-smart + + com.jayway.jsonpath diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/OpenSearchController.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/OpenSearchController.java index 968dba5c3821..39040b7b3cf3 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/OpenSearchController.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/OpenSearchController.java @@ -12,9 +12,7 @@ import java.io.IOException; import java.util.ArrayList; -import java.util.HashMap; import java.util.List; -import java.util.Map; import javax.xml.transform.Transformer; import javax.xml.transform.TransformerException; import javax.xml.transform.TransformerFactory; @@ -30,7 +28,6 @@ import org.dspace.app.rest.utils.ContextUtil; import org.dspace.app.rest.utils.RestDiscoverQueryBuilder; import org.dspace.app.rest.utils.ScopeResolver; -import org.dspace.app.util.SyndicationFeed; import org.dspace.app.util.factory.UtilServiceFactory; import org.dspace.app.util.service.OpenSearchService; import org.dspace.authorize.factory.AuthorizeServiceFactory; @@ -235,12 +232,10 @@ public void search(HttpServletRequest request, log.info("opensearch 
done, query=\"" + query + "\",results=" + qResults.getTotalSearchResults()); - // format and return results - Map labelMap = getLabels(request); List dsoResults = qResults.getIndexableObjects(); Document resultsDoc = openSearchService.getResultsDoc(context, format, query, (int) qResults.getTotalSearchResults(), qResults.getStart(), - qResults.getMaxResults(), container, dsoResults, labelMap); + qResults.getMaxResults(), container, dsoResults); try { Transformer xf = TransformerFactory.newInstance().newTransformer(); response.setContentType(openSearchService.getContentType(format)); @@ -310,20 +305,4 @@ private void init() { public void setOpenSearchService(OpenSearchService oSS) { openSearchService = oSS; } - - - /** - * Internal method to get labels for the returned document - */ - private Map getLabels(HttpServletRequest request) { - // TODO: get strings from translation file or configuration - Map labelMap = new HashMap(); - labelMap.put(SyndicationFeed.MSG_UNTITLED, "notitle"); - labelMap.put(SyndicationFeed.MSG_LOGO_TITLE, "logo.title"); - labelMap.put(SyndicationFeed.MSG_FEED_DESCRIPTION, "general-feed.description"); - for (String selector : SyndicationFeed.getDescriptionSelectors()) { - labelMap.put("metadata." + selector, selector); - } - return labelMap; - } } diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/UUIDLookupRestController.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/UUIDLookupRestController.java index 8a35794aa193..3da84cc2d372 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/UUIDLookupRestController.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/UUIDLookupRestController.java @@ -24,9 +24,11 @@ import org.dspace.app.rest.utils.ContextUtil; import org.dspace.app.rest.utils.DSpaceObjectUtils; import org.dspace.app.rest.utils.Utils; +import org.dspace.authorize.AuthorizeException; +import org.dspace.authorize.service.AuthorizeService; import org.dspace.content.DSpaceObject; +import org.dspace.core.Constants; import org.dspace.core.Context; -import org.dspace.discovery.SearchServiceException; import org.springframework.beans.factory.InitializingBean; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.hateoas.Link; @@ -65,6 +67,9 @@ public class UUIDLookupRestController implements InitializingBean { @Autowired private DiscoverableEndpointsService discoverableEndpointsService; + @Autowired + private AuthorizeService authorizeService; + @Autowired private ConverterService converter; @@ -85,13 +90,14 @@ public void afterPropertiesSet() throws Exception { public void getDSObyIdentifier(HttpServletRequest request, HttpServletResponse response, @RequestParam(PARAM) UUID uuid) - throws IOException, SQLException, SearchServiceException { + throws IOException, SQLException, AuthorizeException { Context context = null; try { context = ContextUtil.obtainContext(request); DSpaceObject dso = dspaceObjectUtil.findDSpaceObject(context, uuid); if (dso != null) { + authorizeService.authorizeAction(context, dso, Constants.READ); DSpaceObjectRest dsor = converter.toRest(dso, utils.obtainProjection()); URI link = linkTo(dsor.getController(), dsor.getCategory(), dsor.getTypePlural()).slash(dsor.getId()) .toUri(); diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/converter/SearchEventConverter.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/converter/SearchEventConverter.java index e9786962e0f5..d781d255df11 100644 --- 
a/dspace-server-webapp/src/main/java/org/dspace/app/rest/converter/SearchEventConverter.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/converter/SearchEventConverter.java @@ -13,7 +13,8 @@ import java.util.List; import jakarta.servlet.http.HttpServletRequest; -import org.apache.log4j.Logger; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; import org.dspace.app.rest.model.PageRest; import org.dspace.app.rest.model.SearchEventRest; import org.dspace.app.rest.model.SearchResultsRest; @@ -31,7 +32,7 @@ @Component public class SearchEventConverter { /* Log4j logger */ - private static final Logger log = Logger.getLogger(SearchEventConverter.class); + private static final Logger log = LogManager.getLogger(SearchEventConverter.class); @Autowired private ScopeResolver scopeResolver; diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/converter/SubmissionSectionConverter.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/converter/SubmissionSectionConverter.java index 0391cbce7a2d..3cd263493b5d 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/converter/SubmissionSectionConverter.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/converter/SubmissionSectionConverter.java @@ -10,6 +10,7 @@ import java.sql.SQLException; import org.apache.logging.log4j.Logger; +import org.dspace.app.rest.model.ScopeEnum; import org.dspace.app.rest.model.SubmissionSectionRest; import org.dspace.app.rest.model.SubmissionVisibilityRest; import org.dspace.app.rest.model.VisibilityEnum; @@ -41,6 +42,7 @@ public SubmissionSectionRest convert(SubmissionStepConfig step, Projection proje sp.setHeader(step.getHeading()); sp.setSectionType(step.getType()); sp.setId(step.getId()); + sp.setScope(ScopeEnum.fromString(step.getScope())); sp.setVisibility(new SubmissionVisibilityRest(VisibilityEnum.fromString(step.getVisibility()), VisibilityEnum.fromString(step.getVisibilityOutside()))); return sp; diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/converter/query/SearchQueryConverter.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/converter/query/SearchQueryConverter.java index 4ea5e6f9df7d..087d422cdcd1 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/converter/query/SearchQueryConverter.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/converter/query/SearchQueryConverter.java @@ -7,6 +7,8 @@ */ package org.dspace.app.rest.converter.query; +import static org.dspace.app.rest.model.SearchConfigurationRest.Filter.OPERATOR_QUERY; + import java.util.LinkedList; import java.util.List; @@ -16,23 +18,23 @@ import org.dspace.app.rest.parameter.SearchFilter; /** - * This method will traverse a list of SearchFilters and transform any SearchFilters with an operator - * this is equal to 'Query' into a SearchFilter that has a standard DSpace operator like 'contains' + * Utility class for transforming a list of SearchFilters. Each SearchFilter with an operator set to 'query' + * is converted into a SearchFilter with a standard DSpace operator like 'contains'. 
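
For illustration, the 'query'-operator conversion documented in this class javadoc amounts to something like the following (a sketch only: the three-argument SearchFilter constructor and the convert() call are taken from the hunk below, while the filter name and value are made up):

    // A filter that arrives with the generic 'query' operator...
    SearchFilter raw = new SearchFilter("title", "query", "open access");
    // ...leaves convert() carrying a concrete DSpace operator (e.g. 'contains'),
    // chosen by RestSearchOperator from the shape of the query value.
    SearchFilter converted = new SearchQueryConverter().convert(List.of(raw)).get(0);
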
*/ public class SearchQueryConverter { /** - * This method traverses the list of SearchFilters and transforms all of those that contain 'Query' + * This method traverses the list of SearchFilters and transforms all of those that with 'query' * as the operator into a standard DSpace SearchFilter * - * @param searchFilters The list of SearchFilters to be used - * @return A list of transformed SearchFilters + * @param searchFilters list of SearchFilters to be transformed + * @return list of transformed SearchFilters */ public List convert(List searchFilters) { List transformedSearchFilters = new LinkedList<>(); for (SearchFilter searchFilter : CollectionUtils.emptyIfNull(searchFilters)) { - if (StringUtils.equals(searchFilter.getOperator(), "query")) { + if (StringUtils.equals(searchFilter.getOperator(), OPERATOR_QUERY)) { SearchFilter transformedSearchFilter = convertQuerySearchFilterIntoStandardSearchFilter(searchFilter); transformedSearchFilters.add(transformedSearchFilter); } else { @@ -46,10 +48,10 @@ public List convert(List searchFilters) { /** * This method takes care of the converter of a specific SearchFilter given to it * - * @param searchFilter The SearchFilter to be transformed - * @return The transformed SearchFilter + * @param searchFilter searchFilter to be transformed + * @return transformed SearchFilter */ - public SearchFilter convertQuerySearchFilterIntoStandardSearchFilter(SearchFilter searchFilter) { + private SearchFilter convertQuerySearchFilterIntoStandardSearchFilter(SearchFilter searchFilter) { RestSearchOperator restSearchOperator = RestSearchOperator.forQuery(searchFilter.getValue()); SearchFilter transformedSearchFilter = new SearchFilter(searchFilter.getName(), restSearchOperator.getDspaceOperator(), restSearchOperator.extractValue(searchFilter.getValue())); diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/scripts/handler/impl/RestDSpaceRunnableHandler.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/scripts/handler/impl/RestDSpaceRunnableHandler.java index 596ab4429093..ee67baa8ab38 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/scripts/handler/impl/RestDSpaceRunnableHandler.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/scripts/handler/impl/RestDSpaceRunnableHandler.java @@ -130,7 +130,7 @@ public void handleCompletion() { @Override public void handleException(Exception e) { - handleException(null, e); + handleException(e.getMessage(), e); } @Override diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/processor/bitstream/BitstreamLinksetProcessor.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/processor/bitstream/BitstreamLinksetProcessor.java index 97eb9f2a546d..349dde7b6dac 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/processor/bitstream/BitstreamLinksetProcessor.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/processor/bitstream/BitstreamLinksetProcessor.java @@ -10,7 +10,8 @@ import java.util.List; import jakarta.servlet.http.HttpServletRequest; -import org.apache.log4j.Logger; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; import org.dspace.app.rest.signposting.model.LinksetNode; import org.dspace.app.rest.signposting.model.LinksetRelationType; import org.dspace.content.Bitstream; @@ -25,7 +26,7 @@ */ public class BitstreamLinksetProcessor extends BitstreamSignpostingProcessor { - private static final Logger log = 
Logger.getLogger(BitstreamLinksetProcessor.class); + private static final Logger log = LogManager.getLogger(BitstreamLinksetProcessor.class); private final BitstreamService bitstreamService; diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/processor/bitstream/BitstreamParentItemProcessor.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/processor/bitstream/BitstreamParentItemProcessor.java index 32928dfa8892..c855f06784f7 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/processor/bitstream/BitstreamParentItemProcessor.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/processor/bitstream/BitstreamParentItemProcessor.java @@ -10,7 +10,8 @@ import java.util.List; import jakarta.servlet.http.HttpServletRequest; -import org.apache.log4j.Logger; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; import org.dspace.app.rest.signposting.model.LinksetNode; import org.dspace.app.rest.signposting.model.LinksetRelationType; import org.dspace.content.Bitstream; @@ -28,7 +29,7 @@ */ public class BitstreamParentItemProcessor extends BitstreamSignpostingProcessor { - private static final Logger log = Logger.getLogger(BitstreamParentItemProcessor.class); + private static final Logger log = LogManager.getLogger(BitstreamParentItemProcessor.class); private final BitstreamService bitstreamService; diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/processor/bitstream/BitstreamTypeProcessor.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/processor/bitstream/BitstreamTypeProcessor.java index 8889a415d327..d0f170b4c55a 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/processor/bitstream/BitstreamTypeProcessor.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/processor/bitstream/BitstreamTypeProcessor.java @@ -11,7 +11,8 @@ import jakarta.servlet.http.HttpServletRequest; import org.apache.commons.lang3.StringUtils; -import org.apache.log4j.Logger; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; import org.dspace.app.rest.signposting.model.LinksetNode; import org.dspace.app.rest.signposting.model.LinksetRelationType; import org.dspace.content.Bitstream; @@ -28,7 +29,7 @@ */ public class BitstreamTypeProcessor extends BitstreamSignpostingProcessor { - private static final Logger log = Logger.getLogger(BitstreamTypeProcessor.class); + private static final Logger log = LogManager.getLogger(BitstreamTypeProcessor.class); @Autowired private SimpleMapConverter mapConverterDSpaceToSchemaOrgUri; diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/processor/item/ItemAuthorProcessor.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/processor/item/ItemAuthorProcessor.java index f3a9e35198a7..ebc7c46f4d23 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/processor/item/ItemAuthorProcessor.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/processor/item/ItemAuthorProcessor.java @@ -16,7 +16,8 @@ import java.util.List; import jakarta.servlet.http.HttpServletRequest; -import org.apache.log4j.Logger; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; import org.dspace.app.rest.signposting.model.LinksetNode; import org.dspace.app.rest.signposting.model.LinksetRelationType; import 
org.dspace.content.Item; @@ -37,7 +38,7 @@ public class ItemAuthorProcessor extends ItemSignpostingProcessor { /** * log4j category */ - private static final Logger log = Logger.getLogger(ItemAuthorProcessor.class); + private static final Logger log = LogManager.getLogger(ItemAuthorProcessor.class); private final ItemService itemService; diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/processor/item/ItemContentBitstreamsProcessor.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/processor/item/ItemContentBitstreamsProcessor.java index 0b91e57f7b3f..65a29c2b6c98 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/processor/item/ItemContentBitstreamsProcessor.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/processor/item/ItemContentBitstreamsProcessor.java @@ -11,7 +11,8 @@ import java.util.List; import jakarta.servlet.http.HttpServletRequest; -import org.apache.log4j.Logger; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; import org.dspace.app.rest.signposting.model.LinksetNode; import org.dspace.app.rest.signposting.model.LinksetRelationType; import org.dspace.content.Bitstream; @@ -33,7 +34,7 @@ public class ItemContentBitstreamsProcessor extends ItemSignpostingProcessor { /** * log4j category */ - private static final Logger log = Logger.getLogger(ItemContentBitstreamsProcessor.class); + private static final Logger log = LogManager.getLogger(ItemContentBitstreamsProcessor.class); public ItemContentBitstreamsProcessor(FrontendUrlService frontendUrlService) { super(frontendUrlService); diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/processor/item/ItemDescribedbyProcessor.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/processor/item/ItemDescribedbyProcessor.java index 20091e6d0992..a86b98f2e5d0 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/processor/item/ItemDescribedbyProcessor.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/processor/item/ItemDescribedbyProcessor.java @@ -10,7 +10,8 @@ import java.util.List; import jakarta.servlet.http.HttpServletRequest; -import org.apache.log4j.Logger; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; import org.dspace.app.rest.signposting.model.LinksetNode; import org.dspace.app.rest.signposting.model.LinksetRelationType; import org.dspace.content.Item; @@ -23,7 +24,7 @@ */ public class ItemDescribedbyProcessor extends ItemSignpostingProcessor { - private static final Logger log = Logger.getLogger(ItemDescribedbyProcessor.class); + private static final Logger log = LogManager.getLogger(ItemDescribedbyProcessor.class); private final ConfigurationService configurationService; diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/processor/item/ItemLicenseProcessor.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/processor/item/ItemLicenseProcessor.java index b60ee35d7fe4..6e26d8f1b225 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/processor/item/ItemLicenseProcessor.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/processor/item/ItemLicenseProcessor.java @@ -11,7 +11,8 @@ import jakarta.servlet.http.HttpServletRequest; import org.apache.commons.lang3.StringUtils; -import org.apache.log4j.Logger; +import 
org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; import org.dspace.app.rest.signposting.model.LinksetNode; import org.dspace.app.rest.signposting.model.LinksetRelationType; import org.dspace.content.Item; @@ -25,7 +26,7 @@ */ public class ItemLicenseProcessor extends ItemSignpostingProcessor { - private static final Logger log = Logger.getLogger(ItemLicenseProcessor.class); + private static final Logger log = LogManager.getLogger(ItemLicenseProcessor.class); private final CreativeCommonsService creativeCommonsService = LicenseServiceFactory.getInstance().getCreativeCommonsService(); diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/processor/item/ItemLinksetProcessor.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/processor/item/ItemLinksetProcessor.java index 2d09e5616171..4e48caf9594f 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/processor/item/ItemLinksetProcessor.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/processor/item/ItemLinksetProcessor.java @@ -10,7 +10,8 @@ import java.util.List; import jakarta.servlet.http.HttpServletRequest; -import org.apache.log4j.Logger; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; import org.dspace.app.rest.signposting.model.LinksetNode; import org.dspace.app.rest.signposting.model.LinksetRelationType; import org.dspace.content.Item; @@ -23,7 +24,7 @@ */ public class ItemLinksetProcessor extends ItemSignpostingProcessor { - private static final Logger log = Logger.getLogger(ItemLinksetProcessor.class); + private static final Logger log = LogManager.getLogger(ItemLinksetProcessor.class); private final ConfigurationService configurationService; diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/processor/item/ItemTypeProcessor.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/processor/item/ItemTypeProcessor.java index 49b3612cd92c..f2533a5a9564 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/processor/item/ItemTypeProcessor.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/processor/item/ItemTypeProcessor.java @@ -11,7 +11,8 @@ import jakarta.servlet.http.HttpServletRequest; import org.apache.commons.lang3.StringUtils; -import org.apache.log4j.Logger; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; import org.dspace.app.rest.signposting.model.LinksetNode; import org.dspace.app.rest.signposting.model.LinksetRelationType; import org.dspace.content.Item; @@ -27,7 +28,7 @@ */ public class ItemTypeProcessor extends ItemSignpostingProcessor { - private static final Logger log = Logger.getLogger(ItemTypeProcessor.class); + private static final Logger log = LogManager.getLogger(ItemTypeProcessor.class); private static final String ABOUT_PAGE_URI = "https://schema.org/AboutPage"; @Autowired diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/service/impl/LinksetServiceImpl.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/service/impl/LinksetServiceImpl.java index 5b28817c9438..42b1c8184957 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/service/impl/LinksetServiceImpl.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/signposting/service/impl/LinksetServiceImpl.java @@ -13,7 +13,8 @@ import java.util.List; import 
jakarta.servlet.http.HttpServletRequest; -import org.apache.log4j.Logger; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; import org.dspace.app.rest.security.BitstreamMetadataReadPermissionEvaluatorPlugin; import org.dspace.app.rest.signposting.model.LinksetNode; import org.dspace.app.rest.signposting.processor.bitstream.BitstreamSignpostingProcessor; @@ -37,7 +38,7 @@ @Service public class LinksetServiceImpl implements LinksetService { - private static final Logger log = Logger.getLogger(LinksetServiceImpl.class); + private static final Logger log = LogManager.getLogger(LinksetServiceImpl.class); @Autowired protected ItemService itemService; diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/submit/step/validation/CclicenseValidator.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/submit/step/validation/CclicenseValidator.java index d0cfd14ec8c5..2273b377fefa 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/submit/step/validation/CclicenseValidator.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/submit/step/validation/CclicenseValidator.java @@ -12,8 +12,8 @@ import java.sql.SQLException; import java.util.ArrayList; import java.util.List; -import javax.inject.Inject; +import jakarta.inject.Inject; import org.dspace.app.rest.model.ErrorRest; import org.dspace.app.rest.submit.SubmissionService; import org.dspace.app.util.DCInputsReaderException; diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/ProcessRestRepositoryIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/ProcessRestRepositoryIT.java index 6f71011933c8..d657917f13e3 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/ProcessRestRepositoryIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/ProcessRestRepositoryIT.java @@ -929,10 +929,10 @@ public void getProcessOutput() throws Exception { getClient(token).perform(get("/api/system/processes/" + process1.getID() + "/output")) .andExpect(status().isOk()) .andExpect(jsonPath("$.name", - is(process1.getName() + process1.getID() + ".log"))) + is(process1.getID() + "-" + process1.getName() + ".log"))) .andExpect(jsonPath("$.type", is("bitstream"))) .andExpect(jsonPath("$.metadata['dc.title'][0].value", - is(process1.getName() + process1.getID() + ".log"))) + is(process1.getID() + "-" + process1.getName() + ".log"))) .andExpect(jsonPath("$.metadata['dspace.process.filetype'][0].value", is("script_output"))); @@ -942,10 +942,10 @@ public void getProcessOutput() throws Exception { .perform(get("/api/system/processes/" + process1.getID() + "/output")) .andExpect(status().isOk()) .andExpect(jsonPath("$.name", - is(process1.getName() + process1.getID() + ".log"))) + is(process1.getID() + "-" + process1.getName() + ".log"))) .andExpect(jsonPath("$.type", is("bitstream"))) .andExpect(jsonPath("$.metadata['dc.title'][0].value", - is(process1.getName() + process1.getID() + ".log"))) + is(process1.getID() + "-" + process1.getName() + ".log"))) .andExpect(jsonPath("$.metadata['dspace.process.filetype'][0].value", is("script_output"))); diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/SubmissionDefinitionsControllerIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/SubmissionDefinitionsControllerIT.java index 0722949ffd0b..32e1fcb51bfc 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/SubmissionDefinitionsControllerIT.java +++ 
b/dspace-server-webapp/src/test/java/org/dspace/app/rest/SubmissionDefinitionsControllerIT.java @@ -8,6 +8,7 @@ package org.dspace.app.rest; import static com.jayway.jsonpath.matchers.JsonPathMatchers.hasJsonPath; +import static org.dspace.app.rest.matcher.SubmissionDefinitionsMatcher.matchSubmissionConfig; import static org.hamcrest.Matchers.allOf; import static org.hamcrest.Matchers.greaterThanOrEqualTo; import static org.hamcrest.Matchers.hasSize; @@ -17,8 +18,12 @@ import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.jsonPath; import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status; +import java.util.List; + import org.dspace.app.rest.matcher.SubmissionDefinitionsMatcher; import org.dspace.app.rest.test.AbstractControllerIntegrationTest; +import org.dspace.app.util.SubmissionConfig; +import org.dspace.app.util.SubmissionConfigReader; import org.dspace.builder.CollectionBuilder; import org.dspace.builder.CommunityBuilder; import org.dspace.content.Collection; @@ -31,12 +36,6 @@ * (Class has to start or end with IT to be picked up by the failsafe plugin) */ public class SubmissionDefinitionsControllerIT extends AbstractControllerIntegrationTest { - - // The total number of expected submission definitions is referred to in multiple tests and assertions as - // is the last page (totalDefinitions - 1) - // This integer should be maintained along with any changes to item-submissions.xml - private static final int totalDefinitions = 12; - @Test public void findAll() throws Exception { //When we call the root endpoint as anonymous user @@ -96,7 +95,7 @@ public void findDefault() throws Exception { //The status has to be 200 OK .andExpect(status().isOk()) .andExpect(jsonPath("$", SubmissionDefinitionsMatcher.matchFullEmbeds())) - .andExpect(jsonPath("$", SubmissionDefinitionsMatcher.matchLinks())) + .andExpect(jsonPath("$", SubmissionDefinitionsMatcher.matchLinks("traditional"))) //We expect the content type to be "application/hal+json;charset=UTF-8" .andExpect(content().contentType(contentType)) @@ -245,13 +244,17 @@ public void findSections() throws Exception { @Test public void findAllPaginationTest() throws Exception { + List definitions = new SubmissionConfigReader().getAllSubmissionConfigs(100, 0); + int totalDefinitions = definitions.size(); + String tokenAdmin = getAuthToken(admin.getEmail(), password); + getClient(tokenAdmin).perform(get("/api/config/submissiondefinitions") .param("size", "1") .param("page", "0")) .andExpect(status().isOk()) .andExpect(content().contentType(contentType)) - .andExpect(jsonPath("$._embedded.submissiondefinitions[0].id", is("traditional"))) + .andExpect(jsonPath("$._embedded.submissiondefinitions[0]", matchSubmissionConfig(definitions.get(0)))) .andExpect(jsonPath("$._links.first.href", Matchers.allOf( Matchers.containsString("/api/config/submissiondefinitions?"), Matchers.containsString("page=0"), Matchers.containsString("size=1")))) @@ -274,7 +277,7 @@ public void findAllPaginationTest() throws Exception { .param("page", "1")) .andExpect(status().isOk()) .andExpect(content().contentType(contentType)) - .andExpect(jsonPath("$._embedded.submissiondefinitions[0].id", is("languagetestprocess"))) + .andExpect(jsonPath("$._embedded.submissiondefinitions[0]", matchSubmissionConfig(definitions.get(1)))) .andExpect(jsonPath("$._links.first.href", Matchers.allOf( Matchers.containsString("/api/config/submissiondefinitions?"), Matchers.containsString("page=0"), Matchers.containsString("size=1")))) @@ -300,7 
+303,7 @@ public void findAllPaginationTest() throws Exception { .param("page", "2")) .andExpect(status().isOk()) .andExpect(content().contentType(contentType)) - .andExpect(jsonPath("$._embedded.submissiondefinitions[0].id", is("extractiontestprocess"))) + .andExpect(jsonPath("$._embedded.submissiondefinitions[0]", matchSubmissionConfig(definitions.get(2)))) .andExpect(jsonPath("$._links.first.href", Matchers.allOf( Matchers.containsString("/api/config/submissiondefinitions?"), Matchers.containsString("page=0"), Matchers.containsString("size=1")))) @@ -326,7 +329,7 @@ public void findAllPaginationTest() throws Exception { .param("page", "3")) .andExpect(status().isOk()) .andExpect(content().contentType(contentType)) - .andExpect(jsonPath("$._embedded.submissiondefinitions[0].id", is("qualdroptest"))) + .andExpect(jsonPath("$._embedded.submissiondefinitions[0]", matchSubmissionConfig(definitions.get(3)))) .andExpect(jsonPath("$._links.first.href", Matchers.allOf( Matchers.containsString("/api/config/submissiondefinitions?"), Matchers.containsString("page=0"), Matchers.containsString("size=1")))) @@ -352,7 +355,7 @@ public void findAllPaginationTest() throws Exception { .param("page", "4")) .andExpect(status().isOk()) .andExpect(content().contentType(contentType)) - .andExpect(jsonPath("$._embedded.submissiondefinitions[0].id", is("typebindtest"))) + .andExpect(jsonPath("$._embedded.submissiondefinitions[0]", matchSubmissionConfig(definitions.get(4)))) .andExpect(jsonPath("$._links.first.href", Matchers.allOf( Matchers.containsString("/api/config/submissiondefinitions?"), Matchers.containsString("page=0"), Matchers.containsString("size=1")))) @@ -378,33 +381,7 @@ public void findAllPaginationTest() throws Exception { .param("page", "5")) .andExpect(status().isOk()) .andExpect(content().contentType(contentType)) - .andExpect(jsonPath("$._embedded.submissiondefinitions[0].id", is("accessConditionNotDiscoverable"))) - .andExpect(jsonPath("$._links.first.href", Matchers.allOf( - Matchers.containsString("/api/config/submissiondefinitions?"), - Matchers.containsString("page=0"), Matchers.containsString("size=1")))) - .andExpect(jsonPath("$._links.prev.href", Matchers.allOf( - Matchers.containsString("/api/config/submissiondefinitions?"), - Matchers.containsString("page=4"), Matchers.containsString("size=1")))) - .andExpect(jsonPath("$._links.next.href", Matchers.allOf( - Matchers.containsString("/api/config/submissiondefinitions?"), - Matchers.containsString("page=6"), Matchers.containsString("size=1")))) - .andExpect(jsonPath("$._links.self.href", Matchers.allOf( - Matchers.containsString("/api/config/submissiondefinitions?"), - Matchers.containsString("page=5"), Matchers.containsString("size=1")))) - .andExpect(jsonPath("$._links.last.href", Matchers.allOf( - Matchers.containsString("/api/config/submissiondefinitions?"), - Matchers.containsString("page=" + (totalDefinitions - 1)), Matchers.containsString("size=1")))) - .andExpect(jsonPath("$.page.size", is(1))) - .andExpect(jsonPath("$.page.totalElements", is(totalDefinitions))) - .andExpect(jsonPath("$.page.totalPages", is(totalDefinitions))) - .andExpect(jsonPath("$.page.number", is(5))); - - getClient(tokenAdmin).perform(get("/api/config/submissiondefinitions") - .param("size", "1") - .param("page", "5")) - .andExpect(status().isOk()) - .andExpect(content().contentType(contentType)) - .andExpect(jsonPath("$._embedded.submissiondefinitions[0].id", is("accessConditionNotDiscoverable"))) + 
.andExpect(jsonPath("$._embedded.submissiondefinitions[0]", matchSubmissionConfig(definitions.get(5)))) .andExpect(jsonPath("$._links.first.href", Matchers.allOf( Matchers.containsString("/api/config/submissiondefinitions?"), Matchers.containsString("page=0"), Matchers.containsString("size=1")))) @@ -425,158 +402,31 @@ public void findAllPaginationTest() throws Exception { .andExpect(jsonPath("$.page.totalPages", is(totalDefinitions))) .andExpect(jsonPath("$.page.number", is(5))); - getClient(tokenAdmin).perform(get("/api/config/submissiondefinitions") - .param("size", "1") - .param("page", "6")) - .andExpect(status().isOk()) - .andExpect(content().contentType(contentType)) - .andExpect(jsonPath("$._embedded.submissiondefinitions[0].id", is("test-hidden"))) - .andExpect(jsonPath("$._links.first.href", Matchers.allOf( - Matchers.containsString("/api/config/submissiondefinitions?"), - Matchers.containsString("page=0"), Matchers.containsString("size=1")))) - .andExpect(jsonPath("$._links.prev.href", Matchers.allOf( - Matchers.containsString("/api/config/submissiondefinitions?"), - Matchers.containsString("page=5"), Matchers.containsString("size=1")))) - .andExpect(jsonPath("$._links.next.href", Matchers.allOf( - Matchers.containsString("/api/config/submissiondefinitions?"), - Matchers.containsString("page=7"), Matchers.containsString("size=1")))) - .andExpect(jsonPath("$._links.self.href", Matchers.allOf( - Matchers.containsString("/api/config/submissiondefinitions?"), - Matchers.containsString("page=6"), Matchers.containsString("size=1")))) - .andExpect(jsonPath("$._links.last.href", Matchers.allOf( - Matchers.containsString("/api/config/submissiondefinitions?"), - Matchers.containsString("page=" + (totalDefinitions - 1)), Matchers.containsString("size=1")))) - .andExpect(jsonPath("$.page.size", is(1))) - .andExpect(jsonPath("$.page.totalElements", is(totalDefinitions))) - .andExpect(jsonPath("$.page.totalPages", is(totalDefinitions))) - .andExpect(jsonPath("$.page.number", is(6))); - - getClient(tokenAdmin).perform(get("/api/config/submissiondefinitions") - .param("size", "1") - .param("page", "7")) - .andExpect(status().isOk()) - .andExpect(content().contentType(contentType)) - .andExpect(jsonPath("$._embedded.submissiondefinitions[0].id", is("topcommunitytest"))) - .andExpect(jsonPath("$._links.first.href", Matchers.allOf( - Matchers.containsString("/api/config/submissiondefinitions?"), - Matchers.containsString("page=0"), Matchers.containsString("size=1")))) - .andExpect(jsonPath("$._links.prev.href", Matchers.allOf( - Matchers.containsString("/api/config/submissiondefinitions?"), - Matchers.containsString("page=6"), Matchers.containsString("size=1")))) - .andExpect(jsonPath("$._links.next.href", Matchers.allOf( - Matchers.containsString("/api/config/submissiondefinitions?"), - Matchers.containsString("page=8"), Matchers.containsString("size=1")))) - .andExpect(jsonPath("$._links.self.href", Matchers.allOf( - Matchers.containsString("/api/config/submissiondefinitions?"), - Matchers.containsString("page=7"), Matchers.containsString("size=1")))) - .andExpect(jsonPath("$._links.last.href", Matchers.allOf( - Matchers.containsString("/api/config/submissiondefinitions?"), - Matchers.containsString("page=" + (totalDefinitions - 1)), Matchers.containsString("size=1")))) - .andExpect(jsonPath("$.page.size", is(1))) - .andExpect(jsonPath("$.page.totalElements", is(totalDefinitions))) - .andExpect(jsonPath("$.page.totalPages", is(totalDefinitions))) - .andExpect(jsonPath("$.page.number", is(7))); - 
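
The hard-coded page-by-page assertions removed in this hunk give way to expectations computed from the live submission configuration. The gist, as a sketch (getAllSubmissionConfigs(100, 0) and getSubmissionName() are the calls used elsewhere in this diff; the variable names are illustrative):

    // Read every configured submission definition up front...
    List<SubmissionConfig> definitions = new SubmissionConfigReader().getAllSubmissionConfigs(100, 0);
    int totalDefinitions = definitions.size();
    // ...so page N (with size=1) is expected to show definitions.get(N), and the
    // last page index is totalDefinitions - 1, however many definitions the
    // submission configuration currently declares.
    int lastPage = totalDefinitions - 1;
    String lastDefinitionName = definitions.get(lastPage).getSubmissionName();
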
- getClient(tokenAdmin).perform(get("/api/config/submissiondefinitions") - .param("size", "1") - .param("page", "8")) - .andExpect(status().isOk()) - .andExpect(content().contentType(contentType)) - .andExpect(jsonPath("$._embedded.submissiondefinitions[0].id", is("subcommunitytest"))) - .andExpect(jsonPath("$._links.first.href", Matchers.allOf( - Matchers.containsString("/api/config/submissiondefinitions?"), - Matchers.containsString("page=0"), Matchers.containsString("size=1")))) - .andExpect(jsonPath("$._links.prev.href", Matchers.allOf( - Matchers.containsString("/api/config/submissiondefinitions?"), - Matchers.containsString("page=7"), Matchers.containsString("size=1")))) - .andExpect(jsonPath("$._links.next.href", Matchers.allOf( - Matchers.containsString("/api/config/submissiondefinitions?"), - Matchers.containsString("page=9"), Matchers.containsString("size=1")))) - .andExpect(jsonPath("$._links.self.href", Matchers.allOf( - Matchers.containsString("/api/config/submissiondefinitions?"), - Matchers.containsString("page=8"), Matchers.containsString("size=1")))) - .andExpect(jsonPath("$._links.last.href", Matchers.allOf( - Matchers.containsString("/api/config/submissiondefinitions?"), - Matchers.containsString("page=" + (totalDefinitions - 1)), Matchers.containsString("size=1")))) - .andExpect(jsonPath("$.page.size", is(1))) - .andExpect(jsonPath("$.page.totalElements", is(totalDefinitions))) - .andExpect(jsonPath("$.page.totalPages", is(totalDefinitions))) - .andExpect(jsonPath("$.page.number", is(8))); - - getClient(tokenAdmin).perform(get("/api/config/submissiondefinitions") - .param("size", "1") - .param("page", "9")) - .andExpect(status().isOk()) - .andExpect(content().contentType(contentType)) - .andExpect(jsonPath("$._embedded.submissiondefinitions[0].id", is("collectiontest"))) - .andExpect(jsonPath("$._links.first.href", Matchers.allOf( - Matchers.containsString("/api/config/submissiondefinitions?"), - Matchers.containsString("page=0"), Matchers.containsString("size=1")))) - .andExpect(jsonPath("$._links.prev.href", Matchers.allOf( - Matchers.containsString("/api/config/submissiondefinitions?"), - Matchers.containsString("page=8"), Matchers.containsString("size=1")))) - .andExpect(jsonPath("$._links.next.href", Matchers.allOf( - Matchers.containsString("/api/config/submissiondefinitions?"), - Matchers.containsString("page=10"), Matchers.containsString("size=1")))) - .andExpect(jsonPath("$._links.self.href", Matchers.allOf( - Matchers.containsString("/api/config/submissiondefinitions?"), - Matchers.containsString("page=9"), Matchers.containsString("size=1")))) - .andExpect(jsonPath("$._links.last.href", Matchers.allOf( - Matchers.containsString("/api/config/submissiondefinitions?"), - Matchers.containsString("page=" + (totalDefinitions - 1)), Matchers.containsString("size=1")))) - .andExpect(jsonPath("$.page.size", is(1))) - .andExpect(jsonPath("$.page.totalElements", is(totalDefinitions))) - .andExpect(jsonPath("$.page.totalPages", is(totalDefinitions))) - .andExpect(jsonPath("$.page.number", is(9))); + // Skip ahead to last page getClient(tokenAdmin).perform(get("/api/config/submissiondefinitions") - .param("size", "1") - .param("page", "10")) + .param("size", "1") + .param("page", "" + (totalDefinitions - 1))) .andExpect(status().isOk()) .andExpect(content().contentType(contentType)) - .andExpect(jsonPath("$._embedded.submissiondefinitions[0].id", is("entitytypetest"))) + .andExpect(jsonPath("$._embedded.submissiondefinitions[0]", + 
matchSubmissionConfig(definitions.get(totalDefinitions - 1)))) .andExpect(jsonPath("$._links.first.href", Matchers.allOf( Matchers.containsString("/api/config/submissiondefinitions?"), Matchers.containsString("page=0"), Matchers.containsString("size=1")))) .andExpect(jsonPath("$._links.prev.href", Matchers.allOf( Matchers.containsString("/api/config/submissiondefinitions?"), - Matchers.containsString("page=9"), Matchers.containsString("size=1")))) - .andExpect(jsonPath("$._links.next.href", Matchers.allOf( - Matchers.containsString("/api/config/submissiondefinitions?"), - Matchers.containsString("page=11"), Matchers.containsString("size=1")))) + Matchers.containsString("page=" + (totalDefinitions - 2)), Matchers.containsString("size=1")))) .andExpect(jsonPath("$._links.self.href", Matchers.allOf( - Matchers.containsString("/api/config/submissiondefinitions?"), - Matchers.containsString("page=10"), Matchers.containsString("size=1")))) - .andExpect(jsonPath("$._links.last.href", Matchers.allOf( Matchers.containsString("/api/config/submissiondefinitions?"), Matchers.containsString("page=" + (totalDefinitions - 1)), Matchers.containsString("size=1")))) - .andExpect(jsonPath("$.page.size", is(1))) - .andExpect(jsonPath("$.page.totalElements", is(totalDefinitions))) - .andExpect(jsonPath("$.page.totalPages", is(totalDefinitions))) - .andExpect(jsonPath("$.page.number", is(10))); - - getClient(tokenAdmin).perform(get("/api/config/submissiondefinitions") - .param("size", "1") - .param("page", "11")) - .andExpect(status().isOk()) - .andExpect(content().contentType(contentType)) - .andExpect(jsonPath("$._embedded.submissiondefinitions[0].id", is("test-duplicate-detection"))) - .andExpect(jsonPath("$._links.first.href", Matchers.allOf( - Matchers.containsString("/api/config/submissiondefinitions?"), - Matchers.containsString("page=0"), Matchers.containsString("size=1")))) - .andExpect(jsonPath("$._links.prev.href", Matchers.allOf( - Matchers.containsString("/api/config/submissiondefinitions?"), - Matchers.containsString("page=10"), Matchers.containsString("size=1")))) - .andExpect(jsonPath("$._links.self.href", Matchers.allOf( - Matchers.containsString("/api/config/submissiondefinitions?"), - Matchers.containsString("page=11"), Matchers.containsString("size=1")))) .andExpect(jsonPath("$._links.last.href", Matchers.allOf( Matchers.containsString("/api/config/submissiondefinitions?"), Matchers.containsString("page=" + (totalDefinitions - 1)), Matchers.containsString("size=1")))) .andExpect(jsonPath("$.page.size", is(1))) .andExpect(jsonPath("$.page.totalElements", is(totalDefinitions))) .andExpect(jsonPath("$.page.totalPages", is(totalDefinitions))) - .andExpect(jsonPath("$.page.number", is(11))); + .andExpect(jsonPath("$.page.number", is(totalDefinitions - 1))); } } diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/UUIDLookupRestControllerIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/UUIDLookupRestControllerIT.java index 8a6debce3ec7..3b0821645861 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/UUIDLookupRestControllerIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/UUIDLookupRestControllerIT.java @@ -17,6 +17,8 @@ import org.apache.commons.codec.CharEncoding; import org.apache.commons.io.IOUtils; import org.dspace.app.rest.test.AbstractControllerIntegrationTest; +import org.dspace.authorize.ResourcePolicy; +import org.dspace.authorize.service.ResourcePolicyService; import org.dspace.builder.BitstreamBuilder; import 
org.dspace.builder.CollectionBuilder; import org.dspace.builder.CommunityBuilder; @@ -31,6 +33,7 @@ import org.dspace.eperson.Group; import org.junit.Ignore; import org.junit.Test; +import org.springframework.beans.factory.annotation.Autowired; /** * Integration test for the UUIDLookup endpoint @@ -39,6 +42,9 @@ */ public class UUIDLookupRestControllerIT extends AbstractControllerIntegrationTest { + @Autowired + ResourcePolicyService resourcePolicyService; + @Test /** * Test the proper redirection of a site's uuid @@ -307,4 +313,35 @@ public void testMissingIdentifierParameter() throws Exception { .andExpect(status().isUnprocessableEntity()); } + @Test + public void testUnauthorized() throws Exception { + context.turnOffAuthorisationSystem(); + Community community = CommunityBuilder.createCommunity(context) + .build(); + for (ResourcePolicy rp : resourcePolicyService.find(context, community)) { + resourcePolicyService.delete(context, rp); + } + context.restoreAuthSystemState(); + + getClient().perform(get("/api/dso/find") + .param("uuid", community.getID().toString())) + .andExpect(status().isUnauthorized()); + } + + @Test + public void testForbidden() throws Exception { + context.turnOffAuthorisationSystem(); + Community community = CommunityBuilder.createCommunity(context) + .build(); + for (ResourcePolicy rp : resourcePolicyService.find(context, community)) { + resourcePolicyService.delete(context, rp); + } + context.restoreAuthSystemState(); + + String authToken = getAuthToken(eperson.getEmail(), password); + getClient(authToken).perform(get("/api/dso/find") + .param("uuid", community.getID().toString())) + .andExpect(status().isForbidden()); + } + } diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/matcher/SubmissionDefinitionsMatcher.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/matcher/SubmissionDefinitionsMatcher.java index 398097db6fba..86f1c7830a0a 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/matcher/SubmissionDefinitionsMatcher.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/matcher/SubmissionDefinitionsMatcher.java @@ -13,6 +13,7 @@ import static org.hamcrest.Matchers.allOf; import static org.hamcrest.Matchers.is; +import org.dspace.app.util.SubmissionConfig; import org.hamcrest.Matcher; /** @@ -25,7 +26,7 @@ private SubmissionDefinitionsMatcher() { } public static Matcher matchSubmissionDefinition(boolean isDefault, String name, String id) { return allOf( matchProperties(isDefault, name, id), - matchLinks() + matchLinks(id) ); } @@ -42,8 +43,8 @@ public static Matcher matchFullEmbeds() { /** * Gets a matcher for all expected links. 
*/ - public static Matcher matchLinks() { - return HalMatcher.matchLinks(REST_SERVER_URL + "config/submissiondefinitions/traditional", + public static Matcher matchLinks(String id) { + return HalMatcher.matchLinks(REST_SERVER_URL + "config/submissiondefinitions/" + id, "collections", "sections", "self" @@ -61,4 +62,12 @@ public static Matcher matchProperties(boolean isDefault, String is(REST_SERVER_URL + "config/submissiondefinitions/" + id + "/sections")) ); } + + public static Matcher matchSubmissionConfig(SubmissionConfig config) { + return matchSubmissionDefinition( + config.isDefaultConf(), + config.getSubmissionName(), + config.getSubmissionName() + ); + } } diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/scripts/handler/impl/TestDSpaceRunnableHandler.java b/dspace-server-webapp/src/test/java/org/dspace/app/scripts/handler/impl/TestDSpaceRunnableHandler.java index 1b5b3fa7ac1a..aced81cbdfdb 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/scripts/handler/impl/TestDSpaceRunnableHandler.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/scripts/handler/impl/TestDSpaceRunnableHandler.java @@ -7,6 +7,9 @@ */ package org.dspace.app.scripts.handler.impl; +import java.util.ArrayList; +import java.util.List; + import org.dspace.scripts.handler.impl.CommandLineDSpaceRunnableHandler; /** @@ -17,6 +20,12 @@ public class TestDSpaceRunnableHandler extends CommandLineDSpaceRunnableHandler private Exception exception = null; + private final List infoMessages = new ArrayList<>(); + + private final List errorMessages = new ArrayList<>(); + + private final List warningMessages = new ArrayList<>(); + /** * We're overriding this method so that we can stop the script from doing the System.exit() if * an exception within the script is thrown @@ -33,4 +42,34 @@ public void handleException(String message, Exception e) { public Exception getException() { return exception; } + + @Override + public void logInfo(String message) { + super.logInfo(message); + infoMessages.add(message); + } + + @Override + public void logWarning(String message) { + super.logWarning(message); + warningMessages.add(message); + } + + @Override + public void logError(String message) { + super.logError(message); + errorMessages.add(message); + } + + public List getInfoMessages() { + return infoMessages; + } + + public List getErrorMessages() { + return errorMessages; + } + + public List getWarningMessages() { + return warningMessages; + } } diff --git a/dspace-server-webapp/src/test/java/org/dspace/curate/CurationScriptIT.java b/dspace-server-webapp/src/test/java/org/dspace/curate/CurationScriptIT.java index 3e40a8559482..8c0744a09cce 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/curate/CurationScriptIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/curate/CurationScriptIT.java @@ -9,6 +9,8 @@ import static com.jayway.jsonpath.JsonPath.read; import static org.hamcrest.Matchers.is; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertTrue; import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.multipart; import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.jsonPath; import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status; @@ -21,6 +23,7 @@ import java.util.stream.Collectors; import com.fasterxml.jackson.databind.ObjectMapper; +import org.apache.commons.lang3.StringUtils; import org.dspace.app.rest.converter.DSpaceRunnableParameterConverter; import 
org.dspace.app.rest.matcher.ProcessMatcher; import org.dspace.app.rest.model.ParameterValueRest; @@ -28,6 +31,7 @@ import org.dspace.app.rest.model.ScriptRest; import org.dspace.app.rest.projection.Projection; import org.dspace.app.rest.test.AbstractControllerIntegrationTest; +import org.dspace.app.scripts.handler.impl.TestDSpaceRunnableHandler; import org.dspace.builder.CollectionBuilder; import org.dspace.builder.CommunityBuilder; import org.dspace.builder.EPersonBuilder; @@ -41,7 +45,9 @@ import org.dspace.content.factory.ContentServiceFactory; import org.dspace.eperson.EPerson; import org.dspace.scripts.DSpaceCommandLineParameter; +import org.dspace.scripts.DSpaceRunnable; import org.dspace.scripts.configuration.ScriptConfiguration; +import org.dspace.scripts.factory.ScriptServiceFactory; import org.dspace.scripts.service.ScriptService; import org.junit.Test; import org.springframework.beans.factory.annotation.Autowired; @@ -640,4 +646,65 @@ public void securityCurateTest() throws Exception { ProcessBuilder.deleteProcess(idItemRef.get()); } } + + @Test + public void testURLRedirectCurateTest() throws Exception { + context.turnOffAuthorisationSystem(); + parentCommunity = CommunityBuilder.createCommunity(context) + .withName("Parent Community") + .build(); + Community child1 = CommunityBuilder.createSubCommunity(context, parentCommunity) + .withName("Sub Community") + .build(); + Collection col1 = CollectionBuilder.createCollection(context, child1).withName("Collection 1").build(); + + Item publicItem1 = ItemBuilder.createItem(context, col1) + .withTitle("Public item 1") + .withIssueDate("2017-10-17") + .withAuthor("Smith, Donald").withAuthor("Doe, John") + // Value not starting with http or https + .withMetadata("dc", "identifier", "uri", "demo.dspace.org/home") + // MetadataValueLinkChecker uri field with regular link + .withMetadata("dc", "description", null, "https://google.com") + // MetadataValueLinkChecker uri field with redirect link + .withMetadata("dc", "description", "uri", "https://demo7.dspace.org/handle/123456789/1") + // MetadataValueLinkChecker uri field with non resolving link + .withMetadata("dc", "description", "uri", "https://www.atmire.com/broken-link") + .withSubject("ExtraEntry") + .build(); + + String[] args = new String[] {"curate", "-t", "checklinks", "-i", publicItem1.getHandle()}; + TestDSpaceRunnableHandler handler = new TestDSpaceRunnableHandler(); + + ScriptService scriptService = ScriptServiceFactory.getInstance().getScriptService(); + ScriptConfiguration scriptConfiguration = scriptService.getScriptConfiguration(args[0]); + + DSpaceRunnable script = null; + if (scriptConfiguration != null) { + script = scriptService.createDSpaceRunnableForScriptConfiguration(scriptConfiguration); + } + if (script != null) { + script.initialize(args, handler, admin); + script.run(); + } + + // field that should be ignored + assertFalse(checkIfInfoTextLoggedByHandler(handler, "demo.dspace.org/home")); + // redirect links in field that should not be ignored (https) => expect OK + assertTrue(checkIfInfoTextLoggedByHandler(handler, "https://demo7.dspace.org/handle/123456789/1 = 200 - OK")); + // regular link in field that should not be ignored (http) => expect OK + assertTrue(checkIfInfoTextLoggedByHandler(handler, "https://google.com = 200 - OK")); + // nonexistent link in field that should not be ignored => expect 404 + assertTrue(checkIfInfoTextLoggedByHandler(handler, "https://www.atmire.com/broken-link = 404 - FAILED")); + } + + boolean 
checkIfInfoTextLoggedByHandler(TestDSpaceRunnableHandler handler, String text) { + for (String message: handler.getInfoMessages()) { + if (StringUtils.containsIgnoreCase(message, text)) { + return true; + } + } + return false; + } + } diff --git a/dspace-services/pom.xml b/dspace-services/pom.xml index 52e975b5af18..852d6a648c37 100644 --- a/dspace-services/pom.xml +++ b/dspace-services/pom.xml @@ -90,6 +90,13 @@ spring-context-support ${spring.version} compile + + + + org.springframework + spring-jcl + + org.apache.commons @@ -121,23 +128,6 @@ junit provided - - - org.mortbay.jetty - jetty - 6.1.26 - test - - - org.mortbay.jetty - jetty-servlet-tester - 6.1.26 - test - - - org.apache.commons - commons-collections4 - org.apache.commons commons-configuration2 @@ -157,11 +147,5 @@ jakarta.annotation-api - - org.springframework.boot - spring-boot-starter-log4j2 - ${spring-boot.version} - - diff --git a/dspace-sword/pom.xml b/dspace-sword/pom.xml index 60f7a5327462..940227a9354b 100644 --- a/dspace-sword/pom.xml +++ b/dspace-sword/pom.xml @@ -25,15 +25,6 @@ - - - - org.dspace dspace-api @@ -55,14 +46,14 @@ org.springframework.boot spring-boot-starter-logging + + + org.springframework + spring-jcl + - - jaxen - jaxen - - org.apache.httpcomponents httpclient @@ -82,24 +73,12 @@ org.apache.logging.log4j log4j-api - - org.apache.logging.log4j - log4j-core - - - org.apache.logging.log4j - log4j-web - xom xom 1.3.9 - - commons-io - commons-io - diff --git a/dspace-swordv2/pom.xml b/dspace-swordv2/pom.xml index 2ed6aa168001..ac361b33c684 100644 --- a/dspace-swordv2/pom.xml +++ b/dspace-swordv2/pom.xml @@ -78,6 +78,11 @@ org.springframework.boot spring-boot-starter-logging + + + org.springframework + spring-jcl + @@ -86,14 +91,6 @@ org.apache.logging.log4j log4j-api - - org.apache.logging.log4j - log4j-core - - - org.apache.logging.log4j - log4j-web - diff --git a/dspace/bin/start-handle-server b/dspace/bin/start-handle-server index 6d5c9a4b4406..b2af9cf759b8 100755 --- a/dspace/bin/start-handle-server +++ b/dspace/bin/start-handle-server @@ -36,6 +36,6 @@ rm -f $HANDLEDIR/txns/lock # does not support more than one JVM writing to the same rolling log. nohup java $JAVA_OPTS -classpath `$BINDIR/dspace classpath` \ -Ddspace.log.init.disable=true \ - -Dlog4j.configuration=log4j-handle-plugin.properties \ + -Dlog4j2.configurationFile=log4j2-handle-plugin.xml \ net.handle.server.Main $HANDLEDIR \ > $LOGDIR/handle-server.log 2>&1 & diff --git a/dspace/bin/start-handle-server.bat b/dspace/bin/start-handle-server.bat index caf6ff3eef12..8280e2b442f1 100644 --- a/dspace/bin/start-handle-server.bat +++ b/dspace/bin/start-handle-server.bat @@ -51,7 +51,7 @@ echo. echo NOTE: If you want to run the Handle Server as a backend process, re-execute this script echo using the Windows "start" command. For example, "start /B start-handle-server.bat" echo. 
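
The switch to -Dlog4j2.configurationFile in the two handle-server launch scripts here goes hand in hand with the logger changes earlier in this patch. The Java-side pattern, repeated across the signposting processors and converters above, is roughly (a before/after sketch; the class name is illustrative):

    import org.apache.logging.log4j.LogManager;
    import org.apache.logging.log4j.Logger;

    public class SomeProcessor {
        // Before (Log4j 1.x API):
        //   private static final Logger log = Logger.getLogger(SomeProcessor.class);
        // After (Log4j 2.x API):
        private static final Logger log = LogManager.getLogger(SomeProcessor.class);
    }
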
-java %JAVA_OPTS% -cp "%DSPACE_CLASSPATH%" -Ddspace.log.init.disable=true -Dlog4j.configuration=log4j-handle-plugin.properties net.handle.server.Main %HANDLEDIR% >> "%LOGDIR%/handle-server.log"
+java %JAVA_OPTS% -cp "%DSPACE_CLASSPATH%" -Ddspace.log.init.disable=true -Dlog4j2.configurationFile=log4j2-handle-plugin.xml net.handle.server.Main %HANDLEDIR% >> "%LOGDIR%/handle-server.log"
 REM Clean up DSPACE_CLASSPATH variable
 set DSPACE_CLASSPATH=
diff --git a/dspace/config/dspace.cfg b/dspace/config/dspace.cfg
index 0d8086751d0d..675e2480c4b8 100644
--- a/dspace/config/dspace.cfg
+++ b/dspace/config/dspace.cfg
@@ -519,6 +519,13 @@ filter.org.dspace.app.mediafilter.PDFBoxThumbnail.inputFormats = Adobe PDF
 # text ("filter-media -f" ) and then reindex your site ("index-discovery -b").
 #textextractor.use-temp-file = false
 
+# Maximum size of a record buffer for text extraction. Set this if you are
+# seeing RecordFormatException calling out excessive array length from
+# 'dspace filter-media'. It is likely that you will need to increase the
+# size of the Java heap if you greatly increase this value -- see JAVA_OPTS
+# in 'bin/dspace' or 'bin/dspace.bat'.
+#textextractor.max-array = 100000000
+
 # Custom settings for ImageMagick Thumbnail Filters
 # ImageMagick and GhostScript must be installed on the server, set the path to ImageMagick and GhostScript executable
 # http://www.imagemagick.org/
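As a rough illustration of where a setting like 'textextractor.max-array' ends up, the sketch below raises Apache POI's record-buffer cap from DSpace configuration before text extraction runs. Only the property name comes from the dspace.cfg change above; the class name and wiring are hypothetical, not DSpace's actual media filter code.

    import org.apache.poi.util.IOUtils;
    import org.dspace.services.ConfigurationService;
    import org.dspace.services.factory.DSpaceServicesFactory;

    public class TextExtractorLimits {

        /**
         * Raise POI's maximum record-buffer size from configuration so that large
         * office documents no longer fail 'dspace filter-media' with a
         * RecordFormatException about excessive array length. Illustrative only.
         */
        public static void applyMaxArrayLimit() {
            ConfigurationService config =
                    DSpaceServicesFactory.getInstance().getConfigurationService();
            // Negative default means "not configured": keep POI's built-in limit.
            int maxArray = config.getIntProperty("textextractor.max-array", -1);
            if (maxArray > 0) {
                // Apache POI 4.x+: overrides the default cap on allocated record arrays.
                IOUtils.setByteArrayMaxOverride(maxArray);
            }
        }
    }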
diff --git a/dspace/config/log4j-handle-plugin.properties b/dspace/config/log4j-handle-plugin.properties
deleted file mode 100644
index 44d39fb1bdf0..000000000000
--- a/dspace/config/log4j-handle-plugin.properties
+++ /dev/null
@@ -1,34 +0,0 @@
-###########################################################################
-# log4j-handle-plugin.properties
-#
-# This is the log4j configuration file for the embedded DSpace Handle server,
-# writing daily rolling logs. We cannot simply write to the same logs, since
-# log4j does not support more than one JVM writing to the same rolling log.
-###########################################################################
-
-# VARIABLES:
-# The following variables can be used to easily tweak the default log4j settings.
-# These variables are used by the log4j config / appenders later in this file.
-
-# log.dir
-# Default log file directory for DSpace. Defaults to the 'log' subdirectory
-# under [dspace.dir]. NOTE: The value of 'dspace.dir' will be replaced by
-# its value in your configuration when DSpace is deployed (via Ant).
-log.dir=${dspace.dir}/log
-
-# Set root category priority to INFO and its only appender to A1.
-log4j.rootCategory=INFO, A1
-
-# A1 is set to be a DailyRollingFileAppender.
-log4j.appender.A1=org.apache.log4j.DailyRollingFileAppender
-log4j.appender.A1.File=${log.dir}/handle-plugin.log
-log4j.appender.A1.DatePattern='.'yyyy-MM-dd
-
-# A1 uses PatternLayout.
-log4j.appender.A1.layout=org.apache.log4j.PatternLayout
-log4j.appender.A1.layout.ConversionPattern=%d %-5p %c @ %m%n
-
-
-# block passwords from being exposed in Axis logs.
-# (DEBUG exposes passwords in Basic Auth)
-log4j.logger.org.apache.axis.handlers.http.HTTPAuthHandler=INFO
\ No newline at end of file
diff --git a/dspace/config/log4j2-handle-plugin.xml b/dspace/config/log4j2-handle-plugin.xml
new file mode 100644
index 000000000000..5f72e05c7303
--- /dev/null
+++ b/dspace/config/log4j2-handle-plugin.xml
@@ -0,0 +1,48 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+
+    The contents of this file are subject to the license and copyright
+    detailed in the LICENSE and NOTICE files at the root of the source
+    tree and available online at
+
+    http://www.dspace.org/license/
+
+-->
+<!--
+    Log4j 2 configuration for the embedded DSpace Handle server plugin,
+    writing daily rolling logs. We cannot simply write to the same logs as
+    DSpace itself, since Log4j does not support more than one JVM writing
+    to the same rolling log.
+-->
+
+<Configuration>
+    <Properties>
+        <!-- Log directory, resolved relative to this configuration file
+             ([dspace]/config), i.e. [dspace]/log -->
+        <Property name="log.dir">${log4j:configParentLocation}/../log</Property>
+        <!-- Date pattern used for the daily rollover of the log file -->
+        <Property name="rollover.date">yyyy-MM-dd</Property>
+    </Properties>
+
+    <Appenders>
+        <!-- A1 is a daily rolling file appender writing to handle-plugin.log -->
+        <RollingFile name="A1"
+                     fileName="${log.dir}/handle-plugin.log"
+                     filePattern="${log.dir}/handle-plugin.log-%d{${rollover.date}}">
+            <PatternLayout pattern="%d %-5p %c @ %m%n"/>
+            <Policies>
+                <TimeBasedTriggeringPolicy/>
+            </Policies>
+        </RollingFile>
+    </Appenders>
+
+    <Loggers>
+        <!-- Block passwords from being exposed in Axis logs
+             (DEBUG exposes passwords in Basic Auth) -->
+        <Logger name="org.apache.axis.handlers.http.HTTPAuthHandler" level="INFO"/>
+
+        <Root level="INFO">
+            <AppenderRef ref="A1"/>
+        </Root>
+    </Loggers>
+</Configuration>
diff --git a/dspace/config/modules/curate.cfg b/dspace/config/modules/curate.cfg
index 1d7b87960df1..6e75738de543 100644
--- a/dspace/config/modules/curate.cfg
+++ b/dspace/config/modules/curate.cfg
@@ -26,3 +26,6 @@ curate.taskqueue.dir = ${dspace.dir}/ctqueues
 # (optional) directory location of scripted (non-java) tasks
 # curate.script.dir = ${dspace.dir}/ctscripts
+
+# Maximum number of redirects to follow for the checklinks task; set to 0 for none or -1 for unlimited
+curate.checklinks.max-redirect = 0
diff --git a/dspace/modules/server-boot/pom.xml b/dspace/modules/server-boot/pom.xml index 4a8fd8f966ff..d95e8c5c595b 100644 --- a/dspace/modules/server-boot/pom.xml +++ b/dspace/modules/server-boot/pom.xml @@ -27,12 +27,13 @@ additions - org.dspace - dspace-server-webapp + org.dspace.modules + server + classes - org.apache.solr - solr-solrj + org.dspace + dspace-server-webapp
diff --git a/dspace/modules/server/pom.xml b/dspace/modules/server/pom.xml index f9b1006e7453..6770a8f952fd 100644 --- a/dspace/modules/server/pom.xml +++ b/dspace/modules/server/pom.xml @@ -27,7 +27,7 @@ maven-dependency-plugin - unpack + unpack-additions prepare-package unpack-dependencies @@ -42,6 +42,20 @@ META-INF/** + + unpack-server + prepare-package + + unpack-dependencies + + + runtime + org.dspace + dspace-server-webapp + **/static/**,**/*.properties + ${project.build.directory}/additions + + @@ -49,6 +63,7 @@ maven-war-plugin false + true true
diff --git a/dspace/src/main/docker/dspace-postgres-pgcrypto-curl/Dockerfile b/dspace/src/main/docker/dspace-postgres-pgcrypto-curl/Dockerfile index aabf87df3eda..f314fdb79e80 100644 --- a/dspace/src/main/docker/dspace-postgres-pgcrypto-curl/Dockerfile +++ b/dspace/src/main/docker/dspace-postgres-pgcrypto-curl/Dockerfile @@ -13,11 +13,11 @@ ARG POSTGRES_VERSION=15 ARG POSTGRES_PASSWORD=dspace -FROM postgres:${POSTGRES_VERSION} +FROM docker.io/postgres:${POSTGRES_VERSION} -ENV POSTGRES_DB dspace -ENV POSTGRES_USER dspace -ENV POSTGRES_PASSWORD ${POSTGRES_PASSWORD} +ENV POSTGRES_DB=dspace +ENV POSTGRES_USER=dspace +ENV POSTGRES_PASSWORD=${POSTGRES_PASSWORD} # Install curl which is necessary to load SQL file RUN apt-get update && apt-get install -y curl && rm -rf /var/lib/apt/lists/*
diff --git a/dspace/src/main/docker/dspace-postgres-pgcrypto/Dockerfile b/dspace/src/main/docker/dspace-postgres-pgcrypto/Dockerfile index 2298cd4e76ea..4f639f361024 100644 --- a/dspace/src/main/docker/dspace-postgres-pgcrypto/Dockerfile +++ b/dspace/src/main/docker/dspace-postgres-pgcrypto/Dockerfile @@ -13,11 +13,11 @@ ARG POSTGRES_VERSION=15 ARG POSTGRES_PASSWORD=dspace -FROM postgres:${POSTGRES_VERSION} +FROM docker.io/postgres:${POSTGRES_VERSION} -ENV POSTGRES_DB dspace -ENV POSTGRES_USER dspace -ENV POSTGRES_PASSWORD ${POSTGRES_PASSWORD} +ENV POSTGRES_DB=dspace +ENV POSTGRES_USER=dspace +ENV POSTGRES_PASSWORD=${POSTGRES_PASSWORD} # Copy over script which will initialize database and install pgcrypto extension COPY install-pgcrypto.sh /docker-entrypoint-initdb.d/
diff --git a/dspace/src/main/docker/dspace-shibboleth/Dockerfile
b/dspace/src/main/docker/dspace-shibboleth/Dockerfile index 79f2921bd7d6..15a436d7b6f7 100644 --- a/dspace/src/main/docker/dspace-shibboleth/Dockerfile +++ b/dspace/src/main/docker/dspace-shibboleth/Dockerfile @@ -10,15 +10,15 @@ # Build from Ubuntu as it has easy Apache tooling (e.g. a2enmod script is debian only). # Apache & mod_shib are required for DSpace to act as an SP # See also https://wiki.lyrasis.org/display/DSDOC7x/Authentication+Plugins#AuthenticationPlugins-ShibbolethAuthentication -FROM ubuntu:20.04 +FROM docker.io/ubuntu:20.04 # Apache ENVs (default values) -ENV APACHE_RUN_USER www-data -ENV APACHE_RUN_GROUP www-data -ENV APACHE_LOCK_DIR /var/lock/apache2 -ENV APACHE_LOG_DIR /var/log/apache2 -ENV APACHE_PID_FILE /var/run/apache2/apache2.pid -ENV APACHE_SERVER_NAME localhost +ENV APACHE_RUN_USER=www-data +ENV APACHE_RUN_GROUP=www-data +ENV APACHE_LOCK_DIR=/var/lock/apache2 +ENV APACHE_LOG_DIR=/var/log/apache2 +ENV APACHE_PID_FILE=/var/run/apache2/apache2.pid +ENV APACHE_SERVER_NAME=localhost # Ensure Apache2, mod_shib & shibboleth daemon are installed. # Also install ssl-cert to provide a local SSL cert for use with Apache diff --git a/dspace/src/main/docker/dspace-solr/Dockerfile b/dspace/src/main/docker/dspace-solr/Dockerfile index 289949922cc1..241a4345b305 100644 --- a/dspace/src/main/docker/dspace-solr/Dockerfile +++ b/dspace/src/main/docker/dspace-solr/Dockerfile @@ -12,7 +12,7 @@ ARG SOLR_VERSION=8.11 -FROM solr:${SOLR_VERSION}-slim +FROM docker.io/solr:${SOLR_VERSION}-slim ENV AUTHORITY_CONFIGSET_PATH=/opt/solr/server/solr/configsets/authority/conf \ OAI_CONFIGSET_PATH=/opt/solr/server/solr/configsets/oai/conf \ diff --git a/pom.xml b/pom.xml index fa401be71b8c..827720bbfaa4 100644 --- a/pom.xml +++ b/pom.xml @@ -19,42 +19,43 @@ 17 - 6.1.14 + 6.2.0 3.3.5 - 6.3.4 + 6.4.0 6.4.8.Final 8.0.1.Final 42.7.4 - 10.20.1 + 10.22.0 8.11.4 3.10.8 - 2.35.1 + 2.36.0 - 2.18.1 - 2.18.1 + 2.18.2 + 2.18.2 2.1.1 4.0.2 4.0.5 1.1.1 9.4.56.v20240826 - 2.23.1 - 2.0.31 + 2.24.3 + 2.0.32 1.19.0 - 2.0.11 + + 2.0.16 2.9.2 - 1.78.1 + 1.79 8.0.1 - 3.1.5 + 3.1.9 2.9.0 - 9.45 + 9.47 @@ -147,6 +148,7 @@ true -XDcompilePolicy=simple + --should-stop=ifError=FLOW -Xplugin:ErrorProne -J--add-exports=jdk.compiler/com.sun.tools.javac.api=ALL-UNNAMED -J--add-exports=jdk.compiler/com.sun.tools.javac.file=ALL-UNNAMED @@ -295,14 +297,14 @@ com.puppycrawl.tools checkstyle - 10.19.0 + 10.21.0 com.github.spotbugs spotbugs-maven-plugin - 4.8.6.5 + 4.8.6.6 Max Low @@ -370,7 +372,7 @@ org.apache.maven.plugins maven-javadoc-plugin - 3.11.1 + 3.11.2 false @@ -692,7 +694,7 @@ org.codehaus.mojo license-maven-plugin - 2.4.0 + 2.5.0 false @@ -1031,6 +1033,12 @@ additions 9.0-SNAPSHOT + + org.dspace.modules + server + classes + 9.0-SNAPSHOT + org.dspace @@ -1124,14 +1132,14 @@ org.jboss.logging jboss-logging - 3.4.3.Final + 3.6.1.Final org.antlr antlr4-runtime - 4.13.1 + 4.13.2 @@ -1155,12 +1163,15 @@ org.springframework spring-orm ${spring.version} + + + + org.springframework + spring-jcl + + - - org.swordapp - sword-common - 1.1 - + spring-core @@ -1306,7 +1317,7 @@ org.apache.james apache-mime4j-core - 0.8.10 + 0.8.11 @@ -1334,7 +1345,7 @@ org.checkerframework checker-qual - 3.48.2 + 3.48.3 - - commons-collections - commons-collections - 3.2.2 - org.apache.commons commons-configuration2 @@ -1485,7 +1488,7 @@ org.apache.commons commons-dbcp2 - 2.12.0 + 2.13.0 @@ -1497,7 +1500,7 @@ commons-io commons-io - 2.17.0 + 2.18.0 org.apache.commons @@ -1529,7 +1532,7 @@ org.apache.commons commons-text - 1.12.0 + 1.13.0 
commons-validator @@ -1539,13 +1542,19 @@ joda-time joda-time - 2.12.5 + 2.13.0 jakarta.activation jakarta.activation-api 2.1.3 + + + jakarta.inject + jakarta.inject-api + 2.0.1 + jakarta.mail @@ -1587,36 +1596,17 @@ log4j-api ${log4j.version} - - org.apache.logging.log4j - log4j-1.2-api - ${log4j.version} - org.apache.logging.log4j log4j-core ${log4j.version} + org.apache.logging.log4j - log4j-web + log4j-slf4j2-impl ${log4j.version} - - org.apache.logging.log4j - log4j-slf4j-impl - ${log4j.version} - - - org.apache.logging.log4j - log4j-jul - ${log4j.version} - - - org.slf4j - jul-to-slf4j - ${slf4j.version} - org.apache.pdfbox @@ -1678,16 +1668,6 @@ slf4j-api ${slf4j.version} - - org.slf4j - slf4j-jdk14 - ${slf4j.version} - - - org.slf4j - log4j-over-slf4j - ${slf4j.version} - com.google.errorprone @@ -1739,36 +1719,6 @@ google-api-services-analytics v3-rev145-1.23.0 - - com.google.api-client - google-api-client - 1.23.0 - - - com.google.http-client - google-http-client - 1.23.0 - - - com.google.http-client - google-http-client-jackson2 - 1.23.0 - - - jackson-core - com.fasterxml.jackson.core - - - jackson-databind - com.fasterxml.jackson.core - - - - - com.google.oauth-client - google-oauth-client - 1.33.3 - @@ -1808,7 +1758,7 @@ com.google.guava guava - 32.0.0-jre + 32.1.3-jre
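For context on the new 'curate.checklinks.max-redirect' property above, here is a minimal sketch of how a link-checking task could honour it using Apache HttpClient, which is already a project dependency. The helper class and method are hypothetical; only the property name and its 0 / -1 semantics come from the curate.cfg change.

    import java.io.IOException;

    import org.apache.http.client.config.RequestConfig;
    import org.apache.http.client.methods.CloseableHttpResponse;
    import org.apache.http.client.methods.HttpHead;
    import org.apache.http.impl.client.CloseableHttpClient;
    import org.apache.http.impl.client.HttpClientBuilder;
    import org.dspace.services.factory.DSpaceServicesFactory;

    public class LinkCheckSketch {

        /**
         * HEAD the given URL, following at most curate.checklinks.max-redirect
         * redirects (0 = none, -1 = unlimited), and return the final HTTP status
         * code. Illustrative only.
         */
        public static int checkUrl(String url) throws IOException {
            int maxRedirect = DSpaceServicesFactory.getInstance()
                    .getConfigurationService()
                    .getIntProperty("curate.checklinks.max-redirect", 0);

            HttpClientBuilder builder = HttpClientBuilder.create();
            if (maxRedirect == 0) {
                // Never follow redirects: report the 3xx status itself.
                builder.disableRedirectHandling();
            } else if (maxRedirect > 0) {
                builder.setDefaultRequestConfig(
                        RequestConfig.custom().setMaxRedirects(maxRedirect).build());
            }
            // maxRedirect == -1: leave HttpClient's default redirect handling in place
            // (enabled, capped at 50) as an approximation of "unlimited".

            try (CloseableHttpClient client = builder.build();
                 CloseableHttpResponse response = client.execute(new HttpHead(url))) {
                return response.getStatusLine().getStatusCode();
            }
        }
    }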